2018-07-21T05:19:50,873 INFO [main] control.CoreCliDriver: Starting org.apache.hadoop.hive.cli.control.CoreCliDriver run at 1532175590866
2018-07-21T05:19:50,916 INFO [main] QTestUtil: Setting up QTestUtil with outDir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/ql/src/test/results/clientpositive/druid, logDir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/log, clusterType=druid, confDir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/conf/llap, hadoopVer=3.1.0, initScript=q_test_druid_init.sql, cleanupScript=q_test_cleanup_druid.sql, withLlapIo=true, fsType=hdfs
2018-07-21T05:19:50,948 INFO [main] conf.HiveConf: Found configuration file file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf/hive-site.xml
2018-07-21T05:19:51,586 DEBUG [main] util.Shell: Failed to detect a valid hadoop home directory
java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset.
    at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:469) ~[hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:440) ~[hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:517) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:78) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.conf.Configuration.getBoolean(Configuration.java:1661) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.SecurityUtil.setConfigurationInternal(SecurityUtil.java:102) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.SecurityUtil.<clinit>(SecurityUtil.java:86) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:310) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:298) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.doSubjectLogin(UserGroupInformation.java:1780) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.createLoginUser(UserGroupInformation.java:704) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:654) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:565) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.conf.Configuration$Resource.getRestrictParserDefault(Configuration.java:289) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.conf.Configuration$Resource.<init>(Configuration.java:257) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.conf.Configuration$Resource.<init>(Configuration.java:249) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.conf.Configuration.addResource(Configuration.java:952) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:5193) [hive-common-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.conf.HiveConf.<init>(HiveConf.java:5150) [hive-common-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.<init>(QTestUtil.java:516) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver$1.invokeInternal(CoreCliDriver.java:67) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver$1.invokeInternal(CoreCliDriver.java:63) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.beforeClass(CoreCliDriver.java:69) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:71) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:19:51,596 DEBUG [main] util.Shell: setsid exited with exit code 0
2018-07-21T05:19:51,622 DEBUG [main] util.NativeCodeLoader: Trying to load the custom-built native-hadoop library...
2018-07-21T05:19:51,623 DEBUG [main] util.NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
2018-07-21T05:19:51,623 DEBUG [main] util.NativeCodeLoader: java.library.path=/usr/java/packages/lib/amd64:/usr/lib/x86_64-linux-gnu/jni:/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:/usr/lib/jni:/lib:/usr/lib
2018-07-21T05:19:51,623 WARN [main] util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
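[Editor's note] The FileNotFoundException above is routine DEBUG noise in test runs: Shell only consults HADOOP_HOME / hadoop.home.dir to locate the distribution's helper binaries, and the run proceeds without them. A minimal sketch (the path is a placeholder, not from this run) of setting the system property before the first Configuration touch, which is the static-init path the trace shows:

```java
import org.apache.hadoop.conf.Configuration;

public class HadoopHomeSetup {
    public static void main(String[] args) {
        // Hypothetical path; Shell checks the HADOOP_HOME environment variable
        // first, then this system property (see Shell.checkHadoopHomeInner above).
        System.setProperty("hadoop.home.dir", "/opt/hadoop-3.1.0");
        // The first Configuration use triggers the Shell/StringUtils static
        // initializers seen in the stack trace; with the property set, the
        // "Failed to detect a valid hadoop home directory" probe is silenced.
        Configuration conf = new Configuration();
        System.out.println(conf.get("fs.defaultFS"));
    }
}
```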
2018-07-21T05:19:51,624 DEBUG [main] util.PerformanceAdvisory: Falling back to shell based
2018-07-21T05:19:51,687 DEBUG [main] conf.HiveConf: Found metastore URI of null
2018-07-21T05:19:51,708 WARN [main] conf.HiveConf: HiveConf of name hive.dummyparam.test.server.specific.config.override does not exist
2018-07-21T05:19:51,708 WARN [main] conf.HiveConf: HiveConf of name hive.metastore.metadb.dir does not exist
2018-07-21T05:19:51,709 WARN [main] conf.HiveConf: HiveConf of name hive.dummyparam.test.server.specific.config.metastoresite does not exist
2018-07-21T05:19:51,709 WARN [main] conf.HiveConf: HiveConf of name hive.llap.daemon.service.port does not exist
2018-07-21T05:19:51,775 INFO [main] hdfs.MiniDFSCluster: starting cluster: numNameNodes=1, numDataNodes=4
2018-07-21T05:19:52,381 DEBUG [main] util.KMSUtil: Creating key provider with config key hadoop.security.key.provider.path
2018-07-21T05:19:52,462 INFO [main] util.GSet: Computing capacity for map BlocksMap
2018-07-21T05:19:52,462 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:52,464 INFO [main] util.GSet: 2.0% max memory 1.8 GB = 36.4 MB
2018-07-21T05:19:52,464 INFO [main] util.GSet: capacity = 2^22 = 4194304 entries
2018-07-21T05:19:52,465 DEBUG [main] util.GSet: recommended=4194304, actual=4194304
2018-07-21T05:19:52,559 INFO [main] util.GSet: Computing capacity for map INodeMap
2018-07-21T05:19:52,559 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:52,560 INFO [main] util.GSet: 1.0% max memory 1.8 GB = 18.2 MB
2018-07-21T05:19:52,560 INFO [main] util.GSet: capacity = 2^21 = 2097152 entries
2018-07-21T05:19:52,560 DEBUG [main] util.GSet: recommended=2097152, actual=2097152
2018-07-21T05:19:52,583 INFO [main] util.GSet: Computing capacity for map cachedBlocks
2018-07-21T05:19:52,584 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:52,584 INFO [main] util.GSet: 0.25% max memory 1.8 GB = 4.6 MB
2018-07-21T05:19:52,584 INFO [main] util.GSet: capacity = 2^19 = 524288 entries
2018-07-21T05:19:52,584 DEBUG [main] util.GSet: recommended=524288, actual=524288
2018-07-21T05:19:52,609 INFO [main] util.GSet: Computing capacity for map NameNodeRetryCache
2018-07-21T05:19:52,609 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:52,609 INFO [main] util.GSet: 0.029999999329447746% max memory 1.8 GB = 559.3 KB
2018-07-21T05:19:52,609 INFO [main] util.GSet: capacity = 2^16 = 65536 entries
2018-07-21T05:19:52,610 DEBUG [main] util.GSet: recommended=65536, actual=65536
2018-07-21T05:19:52,970 DEBUG [main] converters.BooleanConverter: Setting default value: false
2018-07-21T05:19:52,970 DEBUG [main] converters.BooleanConverter: Converting 'Boolean' value 'false' to type 'Boolean'
2018-07-21T05:19:52,971 DEBUG [main] converters.BooleanConverter: No conversion required, value is already a Boolean
2018-07-21T05:19:52,976 DEBUG [main] converters.ByteConverter: Setting default value: 0
2018-07-21T05:19:52,976 DEBUG [main] converters.ByteConverter: Converting 'Integer' value '0' to type 'Byte'
2018-07-21T05:19:52,976 DEBUG [main] converters.ByteConverter: Converted to Byte value '0'
2018-07-21T05:19:52,979 DEBUG [main] converters.CharacterConverter: Setting default value:
2018-07-21T05:19:52,979 DEBUG [main] converters.CharacterConverter: Converting 'Character' value ' ' to type 'Character'
2018-07-21T05:19:52,979 DEBUG [main] converters.CharacterConverter: No conversion required, value is already a Character
2018-07-21T05:19:52,981 DEBUG [main] converters.DoubleConverter: Setting default value: 0
2018-07-21T05:19:52,981 DEBUG [main] converters.DoubleConverter: Converting 'Integer' value '0' to type 'Double'
2018-07-21T05:19:52,981 DEBUG [main] converters.DoubleConverter: Converted to Double value '0.0'
2018-07-21T05:19:52,984 DEBUG [main] converters.FloatConverter: Setting default value: 0
2018-07-21T05:19:52,984 DEBUG [main] converters.FloatConverter: Converting 'Integer' value '0' to type 'Float'
2018-07-21T05:19:52,984 DEBUG [main] converters.FloatConverter: Converted to Float value '0.0'
2018-07-21T05:19:52,986 DEBUG [main] converters.IntegerConverter: Setting default value: 0
2018-07-21T05:19:52,986 DEBUG [main] converters.IntegerConverter: Converting 'Integer' value '0' to type 'Integer'
2018-07-21T05:19:52,986 DEBUG [main] converters.IntegerConverter: No conversion required, value is already a Integer
2018-07-21T05:19:52,989 DEBUG [main] converters.LongConverter: Setting default value: 0
2018-07-21T05:19:52,989 DEBUG [main] converters.LongConverter: Converting 'Integer' value '0' to type 'Long'
2018-07-21T05:19:52,989 DEBUG [main] converters.LongConverter: Converted to Long value '0'
2018-07-21T05:19:52,991 DEBUG [main] converters.ShortConverter: Setting default value: 0
2018-07-21T05:19:52,991 DEBUG [main] converters.ShortConverter: Converting 'Integer' value '0' to type 'Short'
2018-07-21T05:19:52,991 DEBUG [main] converters.ShortConverter: Converted to Short value '0'
2018-07-21T05:19:52,995 DEBUG [main] converters.BigDecimalConverter: Setting default value: 0.0
2018-07-21T05:19:52,995 DEBUG [main] converters.BigDecimalConverter: Converting 'BigDecimal' value '0.0' to type 'BigDecimal'
2018-07-21T05:19:52,995 DEBUG [main] converters.BigDecimalConverter: No conversion required, value is already a BigDecimal
2018-07-21T05:19:52,998 DEBUG [main] converters.BigIntegerConverter: Setting default value: 0
2018-07-21T05:19:52,998 DEBUG [main] converters.BigIntegerConverter: Converting 'BigInteger' value '0' to type 'BigInteger'
2018-07-21T05:19:52,998 DEBUG [main] converters.BigIntegerConverter: No conversion required, value is already a BigInteger
2018-07-21T05:19:52,998 DEBUG [main] converters.BooleanConverter: Setting default value: false
2018-07-21T05:19:52,998 DEBUG [main] converters.BooleanConverter: Converting 'Boolean' value 'false' to type 'Boolean'
2018-07-21T05:19:52,998 DEBUG [main] converters.BooleanConverter: No conversion required, value is already a Boolean
2018-07-21T05:19:52,998 DEBUG [main] converters.ByteConverter: Setting default value: 0
2018-07-21T05:19:52,998 DEBUG [main] converters.ByteConverter: Converting 'Integer' value '0' to type 'Byte'
2018-07-21T05:19:52,998 DEBUG [main] converters.ByteConverter: Converted to Byte value '0'
2018-07-21T05:19:52,998 DEBUG [main] converters.CharacterConverter: Setting default value:
2018-07-21T05:19:52,998 DEBUG [main] converters.CharacterConverter: Converting 'Character' value ' ' to type 'Character'
2018-07-21T05:19:52,998 DEBUG [main] converters.CharacterConverter: No conversion required, value is already a Character
2018-07-21T05:19:52,998 DEBUG [main] converters.DoubleConverter: Setting default value: 0
2018-07-21T05:19:52,999 DEBUG [main] converters.DoubleConverter: Converting 'Integer' value '0' to type 'Double'
2018-07-21T05:19:52,999 DEBUG [main] converters.DoubleConverter: Converted to Double value '0.0'
2018-07-21T05:19:52,999 DEBUG [main] converters.FloatConverter: Setting default value: 0
2018-07-21T05:19:52,999 DEBUG [main] converters.FloatConverter: Converting 'Integer' value '0' to type 'Float'
2018-07-21T05:19:52,999 DEBUG [main] converters.FloatConverter: Converted to Float value '0.0'
2018-07-21T05:19:52,999 DEBUG [main] converters.IntegerConverter: Setting default value: 0
2018-07-21T05:19:52,999 DEBUG [main] converters.IntegerConverter: Converting 'Integer' value '0' to type 'Integer'
2018-07-21T05:19:52,999 DEBUG [main] converters.IntegerConverter: No conversion required, value is already a Integer
2018-07-21T05:19:52,999 DEBUG [main] converters.LongConverter: Setting default value: 0
2018-07-21T05:19:52,999 DEBUG [main] converters.LongConverter: Converting 'Integer' value '0' to type 'Long'
2018-07-21T05:19:52,999 DEBUG [main] converters.LongConverter: Converted to Long value '0'
2018-07-21T05:19:52,999 DEBUG [main] converters.ShortConverter: Setting default value: 0
2018-07-21T05:19:52,999 DEBUG [main] converters.ShortConverter: Converting 'Integer' value '0' to type 'Short'
2018-07-21T05:19:52,999 DEBUG [main] converters.ShortConverter: Converted to Short value '0'
2018-07-21T05:19:53,002 DEBUG [main] converters.StringConverter: Setting default value:
2018-07-21T05:19:53,002 DEBUG [main] converters.StringConverter: Converting 'String' value '' to type 'String'
2018-07-21T05:19:53,014 DEBUG [main] converters.ArrayConverter: Setting default value: [Z@bf71cec
2018-07-21T05:19:53,014 DEBUG [main] converters.ArrayConverter: Converting 'boolean[]' value '[Z@bf71cec' to type 'boolean[]'
2018-07-21T05:19:53,014 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a boolean[]
2018-07-21T05:19:53,014 DEBUG [main] converters.ArrayConverter: Setting default value: [B@22d6cac2
2018-07-21T05:19:53,014 DEBUG [main] converters.ArrayConverter: Converting 'byte[]' value '[B@22d6cac2' to type 'byte[]'
2018-07-21T05:19:53,014 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a byte[]
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Setting default value: [C@30cdae70
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Converting 'char[]' value '[C@30cdae70' to type 'char[]'
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a char[]
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Setting default value: [D@1654a892
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Converting 'double[]' value '[D@1654a892' to type 'double[]'
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a double[]
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Setting default value: [F@2577d6c8
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Converting 'float[]' value '[F@2577d6c8' to type 'float[]'
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a float[]
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Setting default value: [I@3163987e
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Converting 'int[]' value '[I@3163987e' to type 'int[]'
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a int[]
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Setting default value: [J@6c000e0c
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Converting 'long[]' value '[J@6c000e0c' to type 'long[]'
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a long[]
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Setting default value: [S@5f233b26
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: Converting 'short[]' value '[S@5f233b26' to type 'short[]'
2018-07-21T05:19:53,015 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a short[]
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.math.BigDecimal;@44f9779c
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Converting 'BigDecimal[]' value '[Ljava.math.BigDecimal;@44f9779c' to type 'BigDecimal[]'
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a BigDecimal[]
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.math.BigInteger;@5e8a459
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Converting 'BigInteger[]' value '[Ljava.math.BigInteger;@5e8a459' to type 'BigInteger[]'
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a BigInteger[]
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Boolean;@4c9e9fb8
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Converting 'Boolean[]' value '[Ljava.lang.Boolean;@4c9e9fb8' to type 'Boolean[]'
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Boolean[]
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Byte;@40147317
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Converting 'Byte[]' value '[Ljava.lang.Byte;@40147317' to type 'Byte[]'
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Byte[]
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Character;@19542407
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: Converting 'Character[]' value '[Ljava.lang.Character;@19542407' to type 'Character[]'
2018-07-21T05:19:53,016 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Character[]
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Double;@c7a977f
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Converting 'Double[]' value '[Ljava.lang.Double;@c7a977f' to type 'Double[]'
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Double[]
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Float;@6d868997
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Converting 'Float[]' value '[Ljava.lang.Float;@6d868997' to type 'Float[]'
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Float[]
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Integer;@74a195a4
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Converting 'Integer[]' value '[Ljava.lang.Integer;@74a195a4' to type 'Integer[]'
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Integer[]
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Long;@2fa3be26
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Converting 'Long[]' value '[Ljava.lang.Long;@2fa3be26' to type 'Long[]'
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Long[]
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Short;@3af37506
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Converting 'Short[]' value '[Ljava.lang.Short;@3af37506' to type 'Short[]'
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Short[]
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.String;@7c0da600
2018-07-21T05:19:53,017 DEBUG [main] converters.ArrayConverter: Converting 'String[]' value '[Ljava.lang.String;@7c0da600' to type 'String[]'
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a String[]
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.lang.Class;@d4602a
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Converting 'Class[]' value '[Ljava.lang.Class;@d4602a' to type 'Class[]'
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Class[]
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.util.Date;@21ae6e73
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Converting 'Date[]' value '[Ljava.util.Date;@21ae6e73' to type 'Date[]'
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Date[]
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.util.Calendar;@15515c51
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Converting 'Calendar[]' value '[Ljava.util.Calendar;@15515c51' to type 'Calendar[]'
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a Calendar[]
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.io.File;@64a896b0
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Converting 'java.io.File[]' value '[Ljava.io.File;@64a896b0' to type 'java.io.File[]'
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a java.io.File[]
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.sql.Date;@11a82d0f
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: Converting 'java.sql.Date[]' value '[Ljava.sql.Date;@11a82d0f' to type 'java.sql.Date[]'
2018-07-21T05:19:53,018 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a java.sql.Date[]
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.sql.Time;@3ae66c85
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: Converting 'java.sql.Time[]' value '[Ljava.sql.Time;@3ae66c85' to type 'java.sql.Time[]'
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a java.sql.Time[]
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.sql.Timestamp;@4604b900
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: Converting 'java.sql.Timestamp[]' value '[Ljava.sql.Timestamp;@4604b900' to type 'java.sql.Timestamp[]'
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a java.sql.Timestamp[]
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: Setting default value: [Ljava.net.URL;@e36bb2a
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: Converting 'java.net.URL[]' value '[Ljava.net.URL;@e36bb2a' to type 'java.net.URL[]'
2018-07-21T05:19:53,019 DEBUG [main] converters.ArrayConverter: No conversion required, value is already a java.net.URL[]
2018-07-21T05:19:53,086 INFO [main] beanutils.FluentPropertyBeanIntrospector: Error when creating PropertyDescriptor for public final void org.apache.commons.configuration2.AbstractConfiguration.setProperty(java.lang.String,java.lang.Object)! Ignoring this property.
2018-07-21T05:19:53,086 DEBUG [main] beanutils.FluentPropertyBeanIntrospector: Exception is:
java.beans.IntrospectionException: bad write method arg count: public final void org.apache.commons.configuration2.AbstractConfiguration.setProperty(java.lang.String,java.lang.Object)
    at java.beans.PropertyDescriptor.findPropertyType(PropertyDescriptor.java:657) ~[?:1.8.0_102]
    at java.beans.PropertyDescriptor.setWriteMethod(PropertyDescriptor.java:327) ~[?:1.8.0_102]
    at java.beans.PropertyDescriptor.<init>(PropertyDescriptor.java:139) ~[?:1.8.0_102]
    at org.apache.commons.beanutils.FluentPropertyBeanIntrospector.createFluentPropertyDescritor(FluentPropertyBeanIntrospector.java:178) ~[commons-beanutils-1.9.3.jar:1.9.3]
    at org.apache.commons.beanutils.FluentPropertyBeanIntrospector.introspect(FluentPropertyBeanIntrospector.java:141) [commons-beanutils-1.9.3.jar:1.9.3]
    at org.apache.commons.beanutils.PropertyUtilsBean.fetchIntrospectionData(PropertyUtilsBean.java:2245) [commons-beanutils-1.9.3.jar:1.9.3]
    at org.apache.commons.beanutils.PropertyUtilsBean.getIntrospectionData(PropertyUtilsBean.java:2226) [commons-beanutils-1.9.3.jar:1.9.3]
    at org.apache.commons.beanutils.PropertyUtilsBean.getPropertyDescriptor(PropertyUtilsBean.java:954) [commons-beanutils-1.9.3.jar:1.9.3]
    at org.apache.commons.beanutils.PropertyUtilsBean.isWriteable(PropertyUtilsBean.java:1478) [commons-beanutils-1.9.3.jar:1.9.3]
    at org.apache.commons.configuration2.beanutils.BeanHelper.isPropertyWriteable(BeanHelper.java:521) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper.initProperty(BeanHelper.java:357) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper.initBeanProperties(BeanHelper.java:273) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper.initBean(BeanHelper.java:192) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper$BeanCreationContextImpl.initBean(BeanHelper.java:669) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.DefaultBeanFactory.initBeanInstance(DefaultBeanFactory.java:162) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.DefaultBeanFactory.createBean(DefaultBeanFactory.java:116) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper.createBean(BeanHelper.java:459) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper.createBean(BeanHelper.java:479) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.beanutils.BeanHelper.createBean(BeanHelper.java:492) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.builder.BasicConfigurationBuilder.createResultInstance(BasicConfigurationBuilder.java:447) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.builder.BasicConfigurationBuilder.createResult(BasicConfigurationBuilder.java:417) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.commons.configuration2.builder.BasicConfigurationBuilder.getConfiguration(BasicConfigurationBuilder.java:285) [commons-configuration2-2.1.1.jar:2.1.1]
    at org.apache.hadoop.metrics2.impl.MetricsConfig.loadFirst(MetricsConfig.java:119) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.metrics2.impl.MetricsConfig.create(MetricsConfig.java:98) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.metrics2.impl.MetricsSystemImpl.configure(MetricsSystemImpl.java:478) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.metrics2.impl.MetricsSystemImpl.start(MetricsSystemImpl.java:188) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.metrics2.impl.MetricsSystemImpl.init(MetricsSystemImpl.java:163) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.init(DefaultMetricsSystem.java:62) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.initialize(DefaultMetricsSystem.java:58) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1673) [hadoop-hdfs-3.1.0.jar:?]
    at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1288) [hadoop-hdfs-3.1.0-tests.jar:?]
    at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1057) [hadoop-hdfs-3.1.0-tests.jar:?]
    at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:932) [hadoop-hdfs-3.1.0-tests.jar:?]
    at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:864) [hadoop-hdfs-3.1.0-tests.jar:?]
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:497) [hadoop-hdfs-3.1.0-tests.jar:?]
    at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:456) [hadoop-hdfs-3.1.0-tests.jar:?]
    at org.apache.hadoop.hive.shims.Hadoop23Shims.getMiniDfs(Hadoop23Shims.java:565) [hive-shims-0.23-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.shims.Hadoop23Shims.getMiniDfs(Hadoop23Shims.java:538) [hive-shims-0.23-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.setupFileSystem(QTestUtil.java:604) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.<init>(QTestUtil.java:532) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver$1.invokeInternal(CoreCliDriver.java:67) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver$1.invokeInternal(CoreCliDriver.java:63) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.beforeClass(CoreCliDriver.java:69) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:71) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:19:53,114 WARN [main] impl.MetricsConfig: Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2018-07-21T05:19:53,143 INFO [main] impl.MetricsSystemImpl: Scheduled Metric snapshot period at 10 second(s).
2018-07-21T05:19:53,143 INFO [main] impl.MetricsSystemImpl: NameNode metrics system started
2018-07-21T05:19:53,184 DEBUG [main] core.Tracer: sampler.classes = ; loaded no samplers
2018-07-21T05:19:53,188 DEBUG [main] core.Tracer: span.receiver.classes = ; loaded no span receivers
2018-07-21T05:19:53,276 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:19:53,278 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:19:53,279 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@4e558728] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:19:53,293 INFO [main] hdfs.DFSUtil: Starting Web-server for hdfs at: http://localhost:0
2018-07-21T05:19:53,417 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
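[Editor's note] The IntrospectionException logged above is likewise benign: commons-configuration2's fluent setProperty(String, Object) takes two arguments, and a JavaBeans write method must take exactly one, so FluentPropertyBeanIntrospector skips the property. A toy reproduction of the mechanism (class and property names invented for illustration):

```java
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;

public class BadWriteMethodDemo {
    // Two-argument, fluent-style setter, shaped like
    // AbstractConfiguration.setProperty(String, Object) in the trace above.
    public void setProperty(String key, Object value) { }

    public static void main(String[] args) throws Exception {
        Method write = BadWriteMethodDemo.class
                .getMethod("setProperty", String.class, Object.class);
        try {
            // JavaBeans rejects a write method that does not take exactly one
            // argument; this is the same PropertyDescriptor constructor path
            // (findPropertyType) that the DEBUG stack trace shows.
            new PropertyDescriptor("property", null, write);
        } catch (IntrospectionException e) {
            System.out.println(e.getMessage()); // "bad write method arg count: ..."
        }
    }
}
```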
2018-07-21T05:19:53,425 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:19:53,440 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:19:53,444 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context hdfs
2018-07-21T05:19:53,444 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:19:53,444 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:19:53,480 INFO [main] http.HttpServer2: Added filter 'org.apache.hadoop.hdfs.web.AuthFilter' (class=org.apache.hadoop.hdfs.web.AuthFilter)
2018-07-21T05:19:53,480 INFO [main] http.HttpServer2: addJerseyResourcePackage: packageName=org.apache.hadoop.hdfs.server.namenode.web.resources;org.apache.hadoop.hdfs.web.resources, pathSpec=/webhdfs/v1/*
2018-07-21T05:19:53,484 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@8c46918{HTTP/1.1,[http/1.1]}{localhost:0}]
2018-07-21T05:19:53,489 INFO [main] http.HttpServer2: Jetty bound to port 38307
2018-07-21T05:19:53,903 DEBUG [main] util.KMSUtil: Creating key provider with config key hadoop.security.key.provider.path
2018-07-21T05:19:53,904 INFO [main] util.GSet: Computing capacity for map BlocksMap
2018-07-21T05:19:53,904 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:53,905 INFO [main] util.GSet: 2.0% max memory 1.8 GB = 36.4 MB
2018-07-21T05:19:53,905 INFO [main] util.GSet: capacity = 2^22 = 4194304 entries
2018-07-21T05:19:53,905 DEBUG [main] util.GSet: recommended=4194304, actual=4194304
2018-07-21T05:19:53,909 INFO [main] util.GSet: Computing capacity for map INodeMap
2018-07-21T05:19:53,909 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:53,909 INFO [main] util.GSet: 1.0% max memory 1.8 GB = 18.2 MB
2018-07-21T05:19:53,909 INFO [main] util.GSet: capacity = 2^21 = 2097152 entries
2018-07-21T05:19:53,909 DEBUG [main] util.GSet: recommended=2097152, actual=2097152
2018-07-21T05:19:53,911 INFO [main] util.GSet: Computing capacity for map cachedBlocks
2018-07-21T05:19:53,911 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:53,912 INFO [main] util.GSet: 0.25% max memory 1.8 GB = 4.6 MB
2018-07-21T05:19:53,912 INFO [main] util.GSet: capacity = 2^19 = 524288 entries
2018-07-21T05:19:53,912 DEBUG [main] util.GSet: recommended=524288, actual=524288
2018-07-21T05:19:53,912 INFO [main] util.GSet: Computing capacity for map NameNodeRetryCache
2018-07-21T05:19:53,913 INFO [main] util.GSet: VM type = 64-bit
2018-07-21T05:19:53,913 INFO [main] util.GSet: 0.029999999329447746% max memory 1.8 GB = 559.3 KB
2018-07-21T05:19:53,913 INFO [main] util.GSet: capacity = 2^16 = 65536 entries
2018-07-21T05:19:53,913 DEBUG [main] util.GSet: recommended=65536, actual=65536
2018-07-21T05:19:53,966 DEBUG [main] erasurecode.CodecRegistry: Codec registered: codec = rs, coder = rs_native
2018-07-21T05:19:53,967 DEBUG [main] erasurecode.CodecRegistry: Codec registered: codec = xor, coder = xor_native
2018-07-21T05:19:53,969 DEBUG [main] erasurecode.CodecRegistry: Codec registered: codec = rs, coder = rs_java
2018-07-21T05:19:53,971 DEBUG [main] erasurecode.CodecRegistry: Codec registered: codec = rs-legacy, coder = rs-legacy_java
2018-07-21T05:19:53,972 DEBUG [main] erasurecode.CodecRegistry: Codec registered: codec = xor, coder = xor_java
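[Editor's note] The util.GSet capacity lines above (and their earlier twins from the first NameNode-format pass) follow a simple rule: take the named percentage of max heap, divide by the 8-byte reference size on a 64-bit VM, and round down to a power of two. A small worked check reproducing the logged values (an illustrative sketch, not the HDFS LightWeightGSet source; the max-memory figure is taken from the Netty maxDirectMemory line later in this log):

```java
public class GSetCapacityDemo {
    // capacity = largest power of two not exceeding
    // (percent of max memory) / 8 bytes per 64-bit reference
    static long capacity(long maxMemory, double percent) {
        long refs = (long) (maxMemory * percent / 100) / 8;
        return Long.highestOneBit(refs); // round down to 2^n
    }

    public static void main(String[] args) {
        long maxMemory = 1908932608L; // ~1.8 GB, as logged
        System.out.println(capacity(maxMemory, 2.0));  // 4194304 = 2^22 (BlocksMap)
        System.out.println(capacity(maxMemory, 1.0));  // 2097152 = 2^21 (INodeMap)
        System.out.println(capacity(maxMemory, 0.25)); // 524288  = 2^19 (cachedBlocks)
        System.out.println(capacity(maxMemory, 0.03)); // 65536   = 2^16 (NameNodeRetryCache)
    }
}
```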
2018-07-21T05:19:54,553 INFO [main] hdfs.StateChange: STATE* Leaving safe mode after 0 secs
2018-07-21T05:19:54,553 INFO [main] hdfs.StateChange: STATE* Network topology has 0 racks and 0 datanodes
2018-07-21T05:19:54,553 INFO [main] hdfs.StateChange: STATE* UnderReplicatedBlocks has 0 blocks
2018-07-21T05:19:54,559 INFO [Reconstruction Queue Initializer] hdfs.StateChange: STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 6 msec
2018-07-21T05:19:54,614 WARN [main] common.MetricsLoggerTask: Metrics logging will not be async since the logger is not log4j
2018-07-21T05:19:54,639 INFO [main] hdfs.MiniDFSCluster: Starting DataNode 0 with dfs.datanode.data.dir: [DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1,[DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2
2018-07-21T05:19:54,671 DEBUG [main] core.Tracer: sampler.classes = ; loaded no samplers
2018-07-21T05:19:54,672 DEBUG [main] core.Tracer: span.receiver.classes = ; loaded no span receivers
2018-07-21T05:19:54,672 DEBUG [main] fs.FileSystem: Loading filesystems
2018-07-21T05:19:54,674 DEBUG [main] fs.FileSystem: nullscan:// = class org.apache.hadoop.hive.ql.io.NullScanFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar
2018-07-21T05:19:54,674 DEBUG [main] fs.FileSystem: file:// = class org.apache.hadoop.fs.LocalFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar
2018-07-21T05:19:54,675 DEBUG [main] fs.FileSystem: file:// = class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar
2018-07-21T05:19:54,681 DEBUG [main] fs.FileSystem: viewfs:// = class org.apache.hadoop.fs.viewfs.ViewFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar
2018-07-21T05:19:54,684 DEBUG [main] fs.FileSystem: har:// = class org.apache.hadoop.fs.HarFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar
2018-07-21T05:19:54,686 DEBUG [main] fs.FileSystem: http:// = class org.apache.hadoop.fs.http.HttpFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar
2018-07-21T05:19:54,687 DEBUG [main] fs.FileSystem: https:// = class org.apache.hadoop.fs.http.HttpsFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar
2018-07-21T05:19:54,690 DEBUG [main] fs.FileSystem: hdfs:// = class org.apache.hadoop.hdfs.DistributedFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar
2018-07-21T05:19:54,734 DEBUG [main] fs.FileSystem: webhdfs:// = class org.apache.hadoop.hdfs.web.WebHdfsFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar
2018-07-21T05:19:54,735 DEBUG [main] fs.FileSystem: swebhdfs:// = class org.apache.hadoop.hdfs.web.SWebHdfsFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar
2018-07-21T05:19:54,741 DEBUG [main] fs.FileSystem: s3:// = class org.apache.hadoop.fs.s3.S3FileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar
2018-07-21T05:19:54,746 DEBUG [main] fs.FileSystem: s3n:// = class org.apache.hadoop.fs.s3native.NativeS3FileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar
2018-07-21T05:19:54,760 DEBUG [main] fs.FileSystem: s3a:// = class org.apache.hadoop.fs.s3a.S3AFileSystem from /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar
2018-07-21T05:19:54,760 DEBUG [main] fs.FileSystem: Looking for FS supporting file
2018-07-21T05:19:54,760 DEBUG [main] fs.FileSystem: looking for configuration option fs.file.impl
2018-07-21T05:19:54,760 DEBUG [main] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:19:54,760 DEBUG [main] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
2018-07-21T05:19:54,809 INFO [main] impl.MetricsSystemImpl: DataNode metrics system started (again)
2018-07-21T05:19:54,809 DEBUG [main] core.Tracer: sampler.classes = ; loaded no samplers
2018-07-21T05:19:54,810 DEBUG [main] core.Tracer: span.receiver.classes = ; loaded no span receivers
2018-07-21T05:19:54,854 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2018-07-21T05:19:54,856 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:19:54,857 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:19:54,858 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2018-07-21T05:19:54,858 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:19:54,858 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:19:54,863 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@5679e96b{HTTP/1.1,[http/1.1]}{localhost:0}]
2018-07-21T05:19:54,864 INFO [main] http.HttpServer2: Jetty bound to port 46718
2018-07-21T05:19:54,994 DEBUG [main] logging.InternalLoggerFactory: Using SLF4J as the default logging framework
2018-07-21T05:19:54,998 DEBUG [main] channel.MultithreadEventLoopGroup: -Dio.netty.eventLoopThreads: 16
2018-07-21T05:19:55,030 DEBUG [main] internal.PlatformDependent0: -Dio.netty.noUnsafe: false
2018-07-21T05:19:55,031 DEBUG [main] internal.PlatformDependent0: Java version: 8
2018-07-21T05:19:55,032 DEBUG [main] internal.PlatformDependent0: sun.misc.Unsafe.theUnsafe: available
2018-07-21T05:19:55,033 DEBUG [main] internal.PlatformDependent0: sun.misc.Unsafe.copyMemory: available
2018-07-21T05:19:55,033 DEBUG [main] internal.PlatformDependent0: java.nio.Buffer.address: available
2018-07-21T05:19:55,034 DEBUG [main] internal.PlatformDependent0: direct buffer constructor: available
2018-07-21T05:19:55,035 DEBUG [main] internal.PlatformDependent0: java.nio.Bits.unaligned: available, true
2018-07-21T05:19:55,035 DEBUG [main] internal.PlatformDependent0: jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable prior to Java9
2018-07-21T05:19:55,035 DEBUG [main] internal.PlatformDependent0: java.nio.DirectByteBuffer.<init>(long, int): available
2018-07-21T05:19:55,035 DEBUG [main] internal.PlatformDependent: sun.misc.Unsafe: available
2018-07-21T05:19:55,035 DEBUG [main] internal.PlatformDependent: -Dio.netty.tmpdir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp (java.io.tmpdir)
2018-07-21T05:19:55,035 DEBUG [main] internal.PlatformDependent: -Dio.netty.bitMode: 64 (sun.arch.data.model)
2018-07-21T05:19:55,036 DEBUG [main] internal.PlatformDependent: -Dio.netty.noPreferDirect: false
2018-07-21T05:19:55,036 DEBUG [main] internal.PlatformDependent: -Dio.netty.maxDirectMemory: 1908932608 bytes
2018-07-21T05:19:55,036 DEBUG [main] internal.PlatformDependent: -Dio.netty.uninitializedArrayAllocationThreshold: -1
2018-07-21T05:19:55,038 DEBUG [main] internal.CleanerJava6: java.nio.ByteBuffer.cleaner(): available
2018-07-21T05:19:55,060 DEBUG [main] nio.NioEventLoop: -Dio.netty.noKeySetOptimization: false
2018-07-21T05:19:55,060 DEBUG [main] nio.NioEventLoop: -Dio.netty.selectorAutoRebuildThreshold: 512
2018-07-21T05:19:55,071 DEBUG [main] internal.PlatformDependent: org.jctools-core.MpscChunkedArrayQueue: available
2018-07-21T05:19:55,111 DEBUG [main] channel.DefaultChannelId: -Dio.netty.processId: 23905 (auto-detected)
2018-07-21T05:19:55,115 DEBUG [main] util.NetUtil: -Djava.net.preferIPv4Stack: true
2018-07-21T05:19:55,115 DEBUG [main] util.NetUtil: -Djava.net.preferIPv6Addresses: false
2018-07-21T05:19:55,117 DEBUG [main] util.NetUtil: Loopback interface: lo (lo, 127.0.0.1)
2018-07-21T05:19:55,117 DEBUG [main] util.NetUtil: /proc/sys/net/core/somaxconn: 128
2018-07-21T05:19:55,118 DEBUG [main] channel.DefaultChannelId: -Dio.netty.machineId: 42:01:0a:ff:fe:80:00:12 (auto-detected)
2018-07-21T05:19:55,126 DEBUG [main] internal.InternalThreadLocalMap: -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024
2018-07-21T05:19:55,127 DEBUG [main] internal.InternalThreadLocalMap: -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096
2018-07-21T05:19:55,141 DEBUG [main] util.ResourceLeakDetector: -Dio.netty.leakDetection.level: simple
2018-07-21T05:19:55,141 DEBUG [main] util.ResourceLeakDetector: -Dio.netty.leakDetection.targetRecords: 4
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.numHeapArenas: 16
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.numDirectArenas: 16
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.pageSize: 8192
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.maxOrder: 11
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.chunkSize: 16777216
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.tinyCacheSize: 512
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.smallCacheSize: 256
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.normalCacheSize: 64
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.maxCachedBufferCapacity: 32768
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.cacheTrimInterval: 8192
2018-07-21T05:19:55,175 DEBUG [main] buffer.PooledByteBufAllocator: -Dio.netty.allocator.useCacheForAllThreads: true
2018-07-21T05:19:55,185 DEBUG [main] buffer.ByteBufUtil: -Dio.netty.allocator.type: pooled
2018-07-21T05:19:55,185 DEBUG [main] buffer.ByteBufUtil: -Dio.netty.threadLocalDirectBufferSize: 65536
2018-07-21T05:19:55,185 DEBUG [main] buffer.ByteBufUtil: -Dio.netty.maxThreadLocalCharBufferSize: 16384
2018-07-21T05:19:55,203 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:19:55,204 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:19:55,204 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@1a6dc589] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:19:55,284 WARN [main] common.MetricsLoggerTask: Metrics logging will not be async since the logger is not log4j
2018-07-21T05:19:55,290 INFO [main] hdfs.MiniDFSCluster: Starting DataNode 1 with dfs.datanode.data.dir: [DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3,[DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4
2018-07-21T05:19:55,382 INFO [main] impl.MetricsSystemImpl: DataNode metrics system started (again)
2018-07-21T05:19:55,382 DEBUG [main] core.Tracer: sampler.classes = ; loaded no samplers
2018-07-21T05:19:55,382 DEBUG [main] core.Tracer: span.receiver.classes = ; loaded no span receivers
2018-07-21T05:19:55,389 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2018-07-21T05:19:55,399 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:19:55,400 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:19:55,402 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2018-07-21T05:19:55,402 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:19:55,402 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:19:55,403 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@2d5ef498{HTTP/1.1,[http/1.1]}{localhost:0}]
2018-07-21T05:19:55,403 INFO [main] http.HttpServer2: Jetty bound to port 40452
2018-07-21T05:19:55,605 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:19:55,605 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:19:55,611 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@63cd2cd2] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:19:55,650 WARN [main] common.MetricsLoggerTask: Metrics logging will not be async since the logger is not log4j
2018-07-21T05:19:55,801 INFO [main] hdfs.MiniDFSCluster: Starting DataNode 2 with dfs.datanode.data.dir: [DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5,[DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6
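[Editor's note] Among the Netty allocator settings logged above, the chunk size is derived rather than set independently: chunkSize = pageSize << maxOrder. A one-line check of the logged values (an illustrative sketch):

```java
public class NettyChunkSizeCheck {
    public static void main(String[] args) {
        int pageSize = 8192; // -Dio.netty.allocator.pageSize
        int maxOrder = 11;   // -Dio.netty.allocator.maxOrder
        // 8192 << 11 = 16777216, matching -Dio.netty.allocator.chunkSize above
        System.out.println(pageSize << maxOrder);
    }
}
```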
2018-07-21T05:19:55,886 INFO [main] impl.MetricsSystemImpl: DataNode metrics system started (again)
2018-07-21T05:19:55,887 DEBUG [main] core.Tracer: sampler.classes = ; loaded no samplers
2018-07-21T05:19:55,887 DEBUG [main] core.Tracer: span.receiver.classes = ; loaded no span receivers
2018-07-21T05:19:55,905 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2018-07-21T05:19:55,936 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:19:55,937 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:19:55,938 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2018-07-21T05:19:55,938 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:19:55,938 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:19:55,938 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@5922d3e9{HTTP/1.1,[http/1.1]}{localhost:0}]
2018-07-21T05:19:55,939 INFO [main] http.HttpServer2: Jetty bound to port 58111
2018-07-21T05:19:56,297 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:19:56,297 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:19:56,297 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@5961e92d] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:19:56,346 WARN [main] common.MetricsLoggerTask: Metrics logging will not be async since the logger is not log4j
2018-07-21T05:19:56,356 INFO [main] hdfs.MiniDFSCluster: Starting DataNode 3 with dfs.datanode.data.dir: [DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7,[DISK]file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8
2018-07-21T05:19:56,443 INFO [main] impl.MetricsSystemImpl: DataNode metrics system started (again)
2018-07-21T05:19:56,443 DEBUG [main] core.Tracer: sampler.classes = ; loaded no samplers
2018-07-21T05:19:56,445 DEBUG [main] core.Tracer: span.receiver.classes = ; loaded no span receivers
2018-07-21T05:19:56,461 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
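[Editor's note] The registerDatanode lines that follow place each of the four DataNodes under /default-rack in an org.apache.hadoop.net.NetworkTopology. A minimal standalone sketch of that bookkeeping (addresses copied from the log; this exercises the topology class directly, not the NameNode code path):

```java
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.NodeBase;

public class TopologyDemo {
    public static void main(String[] args) {
        NetworkTopology topology = new NetworkTopology();
        // MiniDFSCluster registers every local DataNode under /default-rack.
        topology.add(new NodeBase("/default-rack/127.0.0.1:52570"));
        topology.add(new NodeBase("/default-rack/127.0.0.1:33099"));
        topology.add(new NodeBase("/default-rack/127.0.0.1:40780"));
        topology.add(new NodeBase("/default-rack/127.0.0.1:45625"));
        // Matches "Number of racks: 1 Expected number of leaves:4" below.
        System.out.println(topology.getNumOfRacks() + " rack(s), "
                + topology.getNumOfLeaves() + " leaves");
    }
}
```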
2018-07-21T05:19:56,471 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:19:56,473 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:19:56,474 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2018-07-21T05:19:56,474 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:19:56,475 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:19:56,475 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@788ba63e{HTTP/1.1,[http/1.1]}{localhost:0}]
2018-07-21T05:19:56,475 INFO [main] http.HttpServer2: Jetty bound to port 35748
2018-07-21T05:19:56,601 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(127.0.0.1:52570, datanodeUuid=36c7baef-5d8d-44ef-8960-e5e2e161c97a, infoPort=42302, infoSecurePort=0, ipcPort=48537, storageInfo=lv=-57;cid=testClusterID;nsid=345689785;c=1532175592633) storage 36c7baef-5d8d-44ef-8960-e5e2e161c97a
2018-07-21T05:19:56,604 INFO [IPC Server handler 3 on 35925] net.NetworkTopology: Adding a new node: /default-rack/127.0.0.1:52570
2018-07-21T05:19:56,605 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:1 /default-rack/127.0.0.1:52570
2018-07-21T05:19:56,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:1 /default-rack/127.0.0.1:52570
2018-07-21T05:19:56,611 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(127.0.0.1:33099, datanodeUuid=8f95c842-a0c9-4e85-941e-02356267dd5f, infoPort=46305, infoSecurePort=0, ipcPort=35555, storageInfo=lv=-57;cid=testClusterID;nsid=345689785;c=1532175592633) storage 8f95c842-a0c9-4e85-941e-02356267dd5f
2018-07-21T05:19:56,617 INFO [IPC Server handler 6 on 35925] net.NetworkTopology: Adding a new node: /default-rack/127.0.0.1:33099
2018-07-21T05:19:56,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:2 /default-rack/127.0.0.1:52570 /default-rack/127.0.0.1:33099
2018-07-21T05:19:56,618 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:2 /default-rack/127.0.0.1:52570 /default-rack/127.0.0.1:33099
2018-07-21T05:19:56,764 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(127.0.0.1:40780, datanodeUuid=425f2bd0-2dbf-418a-a2b5-a7e216df54e6, infoPort=43939, infoSecurePort=0, ipcPort=59862, storageInfo=lv=-57;cid=testClusterID;nsid=345689785;c=1532175592633) storage 425f2bd0-2dbf-418a-a2b5-a7e216df54e6
2018-07-21T05:19:56,764 INFO [IPC Server handler 5 on 35925] net.NetworkTopology: Adding a new node: /default-rack/127.0.0.1:40780
2018-07-21T05:19:56,764 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:3 /default-rack/127.0.0.1:52570 /default-rack/127.0.0.1:33099 /default-rack/127.0.0.1:40780
2018-07-21T05:19:56,764 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:3 /default-rack/127.0.0.1:52570 /default-rack/127.0.0.1:33099 /default-rack/127.0.0.1:40780
2018-07-21T05:19:56,898 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:19:56,898 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:19:56,900 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@2f0ed952] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:19:56,907 WARN [main] common.MetricsLoggerTask: Metrics logging will not be async since the logger is not log4j
2018-07-21T05:19:57,065 DEBUG [main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:19:57,378 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(127.0.0.1:45625, datanodeUuid=abcf8c77-f058-404d-990a-8a939ff08e3b, infoPort=50043, infoSecurePort=0, ipcPort=52446, storageInfo=lv=-57;cid=testClusterID;nsid=345689785;c=1532175592633) storage abcf8c77-f058-404d-990a-8a939ff08e3b
2018-07-21T05:19:57,379 INFO [IPC Server handler 3 on 35925] net.NetworkTopology: Adding a new node: /default-rack/127.0.0.1:45625
2018-07-21T05:19:57,379 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:4 /default-rack/127.0.0.1:52570 /default-rack/127.0.0.1:33099 /default-rack/127.0.0.1:40780 /default-rack/127.0.0.1:45625
2018-07-21T05:19:57,379 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: NetworkTopology became: Number of racks: 1 Expected number of leaves:4 /default-rack/127.0.0.1:52570 /default-rack/127.0.0.1:33099 /default-rack/127.0.0.1:40780 /default-rack/127.0.0.1:45625
2018-07-21T05:19:57,827 DEBUG [main] util.PerformanceAdvisory: Both short-circuit local reads and UNIX domain socket are disabled.
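[Editor's note] The last DEBUG line above means HDFS clients in this run read blocks over TCP rather than directly from local disk. For reference, a hedged sketch of the two client-side settings that would enable short-circuit local reads (the socket path is a placeholder, not from this test, and the DataNode must be configured with the same path):

```java
import org.apache.hadoop.conf.Configuration;

public class ShortCircuitConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Both settings are required; without them, PerformanceAdvisory logs
        // the "short-circuit local reads ... disabled" line seen above.
        conf.setBoolean("dfs.client.read.shortcircuit", true);
        conf.set("dfs.domain.socket.path", "/var/lib/hadoop-hdfs/dn_socket"); // placeholder
        System.out.println(conf.getBoolean("dfs.client.read.shortcircuit", false));
    }
}
```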
2018-07-21T05:19:58,077 INFO [main] hdfs.MiniDFSCluster: Cluster is active
2018-07-21T05:19:58,099 DEBUG [main] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:19:58,099 DEBUG [main] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:19:58,099 DEBUG [main] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:19:58,099 DEBUG [main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:19:58,100 DEBUG [main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:19:58,226 INFO [main] server.ZooKeeperServer: Server environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
2018-07-21T05:19:58,227 INFO [main] server.ZooKeeperServer: Server environment:host.name=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:19:58,227 INFO [main] server.ZooKeeperServer: Server environment:java.version=1.8.0_102
2018-07-21T05:19:58,228 INFO [main] server.ZooKeeperServer: Server environment:java.vendor=Oracle Corporation
2018-07-21T05:19:58,231 INFO [main] server.ZooKeeperServer: Server environment:java.home=/usr/lib/jvm/java-8-openjdk-amd64/jre
2018-07-21T05:19:58,241 INFO [main] server.ZooKeeperServer: Server environment:java.class.path=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.
20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158
.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-j
ndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/hive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace
r/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-
hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/
home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hivep
test-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.
1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.
22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/
validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty
-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/asm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.1
58.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-provider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dru
id/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.ja
r:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-acid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:java.library.path=/usr/java/packages/lib/amd64:/usr/lib/x86_64-linux-gnu/jni:/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:/usr/lib/jni:/lib:/usr/lib 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:java.io.tmpdir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:java.compiler= 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:os.name=Linux 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:os.arch=amd64 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:os.version=3.16.0-4-amd64 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:user.name=hiveptest 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:user.home=/home/hiveptest 2018-07-21T05:19:58,243 INFO [main] server.ZooKeeperServer: Server environment:user.dir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest 2018-07-21T05:19:58,290 INFO [main] server.ZooKeeperServer: Created server with tickTime 2000 minSessionTimeout 4000 maxSessionTimeout 40000 datadir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/tmp_19026/zookeeper_0/version-2 snapdir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/tmp_19026/zookeeper_0/version-2 2018-07-21T05:19:58,297 INFO [main] server.ZooKeeperServer: minSessionTimeout set to -1 2018-07-21T05:19:58,297 INFO [main] server.ZooKeeperServer: maxSessionTimeout set to -1 2018-07-21T05:19:58,308 INFO [main] server.NIOServerCnxnFactory: binding to port 0.0.0.0/0.0.0.0:63672 2018-07-21T05:19:58,400 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] 
2018-07-21T05:19:58,400 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53430
2018-07-21T05:19:58,434 INFO [main] zookeeper.MiniZooKeeperCluster: Started MiniZooKeeperCluster and ran successful 'stat' on client port=63672
2018-07-21T05:19:58,452 INFO [main] zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
2018-07-21T05:19:58,453 INFO [main] zookeeper.ZooKeeper: Client environment:host.name=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:19:58,453 INFO [main] zookeeper.ZooKeeper: Client environment:java.version=1.8.0_102
2018-07-21T05:19:58,453 INFO [main] zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation
2018-07-21T05:19:58,453 INFO [main] zookeeper.ZooKeeper: Client environment:java.home=/usr/lib/jvm/java-8-openjdk-amd64/jre
2018-07-21T05:19:58,453 INFO [main] zookeeper.ZooKeeper: Client environment:java.class.path=[identical to the Server environment java.class.path logged above; verbatim duplicate omitted]
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:java.library.path=/usr/java/packages/lib/amd64:/usr/lib/x86_64-linux-gnu/jni:/lib/x86_64-linux-gnu:/usr/lib/x86_64-linux-gnu:/usr/lib/jni:/lib:/usr/lib
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:java.io.tmpdir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:java.compiler=
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:os.name=Linux
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:os.arch=amd64
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:os.version=3.16.0-4-amd64
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:user.name=hiveptest
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:user.home=/home/hiveptest
2018-07-21T05:19:58,455 INFO [main] zookeeper.ZooKeeper: Client environment:user.dir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest
2018-07-21T05:19:58,456 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@74024f3
2018-07-21T05:19:58,502 INFO [main] druid.MiniDruidCluster: Creating the druid directory [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data]
2018-07-21T05:19:58,519 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53431
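[Note: the "Initiating client connection" record above captures the three arguments the ZooKeeper client constructor takes: the connect string (localhost:63672), the session timeout in milliseconds (1200000), and a Watcher (here an anonymous class inside QTestUtil$QTestSetup). The following is a minimal Java sketch of opening such a session with the standard org.apache.zookeeper API; the class name ZkConnectSketch and the latch-based wait are illustrative and are not Hive's actual QTestSetup code.]

    import java.util.concurrent.CountDownLatch;
    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    public class ZkConnectSketch {
        public static void main(String[] args) throws Exception {
            final CountDownLatch connected = new CountDownLatch(1);
            // Same three arguments as the "Initiating client connection" record:
            // connect string, session timeout in ms, and a watcher callback.
            ZooKeeper zk = new ZooKeeper("localhost:63672", 1200000, new Watcher() {
                @Override
                public void process(WatchedEvent event) {
                    if (event.getState() == Event.KeeperState.SyncConnected) {
                        connected.countDown(); // cf. the "Established session 0x..." record below
                    }
                }
            });
            connected.await();
            System.out.println("session 0x" + Long.toHexString(zk.getSessionId()));
            zk.close();
        }
    }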
2018-07-21T05:19:58,520 INFO [main] druid.ForkingDruidNode: Creating forking druid node with java -server -XX:MaxDirectMemorySize=2g -Xmx512m -Xms512m -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Ddruid.emitter=logging -Ddruid.emitter.logging.logLevel=info -server -cp /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar -Dhadoop.log.dir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp -Dhadoop.bin.path=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../testutils/hadoop -Djava.io.tmpdir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp -Ddruid.metadata.storage.type=derby -Ddruid.storage.type=hdfs -Ddruid.processing.buffer.sizeBytes=213870912 -Ddruid.processing.numThreads=2 -Ddruid.indexer.logs.type=file -Ddruid.coordinator.asOverlord.enabled=true -Ddruid.coordinator.asOverlord.overlordService=druid/overlord -Ddruid.coordinator.period=PT10S -Ddruid.manager.segments.pollDuration=PT10S -Ddruid.metadata.storage.connector.connectURI=jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db;create=true -Ddruid.indexer.logs.directory=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/log/indexer-log -Ddruid.zk.service.host=localhost:63672 -Ddruid.coordinator.startDelay=PT1S -Ddruid.indexer.runner=local -Ddruid.storage.storageDirectory=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage io.druid.cli.Main server coordinator
2018-07-21T05:19:58,521 INFO [main] druid.ForkingDruidNode: Creating forking druid node with java -server -XX:MaxDirectMemorySize=10g -Xmx512m -Xmx512m -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Ddruid.emitter=logging -Ddruid.emitter.logging.logLevel=info -server -cp /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar -Dhadoop.log.dir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp -Dhadoop.bin.path=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../testutils/hadoop -Djava.io.tmpdir=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp -Ddruid.metadata.storage.type=derby -Ddruid.storage.type=hdfs -Ddruid.processing.buffer.sizeBytes=213870912 -Ddruid.processing.numThreads=2 -Ddruid.server.maxSize=130000000000 -Ddruid.zk.service.host=localhost:63672 -Ddruid.segmentCache.locations=[{"path":"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid/segment-cache","maxSize":130000000000}] -Ddruid.storage.storageDirectory=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage io.druid.cli.Main server historical
-Ddruid.processing.buffer.sizeBytes=213870912 -Ddruid.processing.numThreads=2 -Ddruid.server.maxSize=130000000000 -Ddruid.zk.service.host=localhost:63672 -Ddruid.segmentCache.locations=[{"path":"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid/segment-cache","maxSize":130000000000}] -Ddruid.storage.storageDirectory=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage io.druid.cli.Main server broker 2018-07-21T05:19:58,532 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53431 2018-07-21T05:19:58,536 INFO [SyncThread:0] persistence.FileTxnLog: Creating new log file: log.1 2018-07-21T05:19:58,558 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0000 with negotiated timeout 40000 for client /127.0.0.1:53431 2018-07-21T05:19:58,609 DEBUG [main] service.AbstractService: Service: mini-druid entered state INITED 2018-07-21T05:19:58,638 INFO [main] druid.ForkingDruidNode: Started coordinator 2018-07-21T05:19:58,645 INFO [main] druid.ForkingDruidNode: Started historical 2018-07-21T05:19:58,668 INFO [main] druid.ForkingDruidNode: Started broker 2018-07-21T05:19:58,668 DEBUG [main] service.AbstractService: Service mini-druid is started 2018-07-21T05:19:58,824 DEBUG [main] service.AbstractService: Service: hive entered state INITED 2018-07-21T05:19:58,825 INFO [main] test.MiniTezCluster: Using Tez AppJar: /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar 2018-07-21T05:19:59,122 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:19:59,123 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:19:59,128 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741825_1001, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /user/hiveptest/target/hive-tmpDir/TezAppJar.jar
2018-07-21T05:20:00,875 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /user/hiveptest/target/hive-tmpDir/TezAppJar.jar is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:00,886 INFO [main] test.MiniTezCluster: Set TEZ-LIB-URI to: hdfs://localhost:35925/user/hiveptest/target/hive-tmpDir/TezAppJar.jar
2018-07-21T05:20:00,893 DEBUG [main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:20:00,896 DEBUG [main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:20:00,903 INFO [main] test.MiniTezCluster: mkdir: hdfs://localhost:35925/apps_staging_dir
2018-07-21T05:20:00,980 INFO [main] jobhistory.JobHistoryUtils: Default file system is set solely by core-default.xml therefore - ignoring
2018-07-21T05:20:01,044 INFO [main] net.ServerSocketUtil: Using port 9188
2018-07-21T05:20:01,082 INFO [main] resource.ResourceUtils: Unable to find 'resource-types.xml'.
2018-07-21T05:20:01,172 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.MiniYARNCluster$ResourceManagerWrapper_0
2018-07-21T05:20:01,245 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper_0
2018-07-21T05:20:01,249 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper_1
2018-07-21T05:20:01,250 DEBUG [main] service.CompositeService: hive: initing services, size=3
2018-07-21T05:20:01,250 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.MiniYARNCluster$ResourceManagerWrapper_0 entered state INITED
2018-07-21T05:20:01,278 DEBUG [main] service.AbstractService: Service: ResourceManager entered state INITED
2018-07-21T05:20:01,318 DEBUG [main] util.PerformanceAdvisory: Falling back to shell based
2018-07-21T05:20:01,319 INFO [main] security.Groups: clearing userToGroupsMap cache
2018-07-21T05:20:01,520 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.RMFatalEventType for class org.apache.hadoop.yarn.server.resourcemanager.ResourceManager$RMFatalEventDispatcher
2018-07-21T05:20:01,520 DEBUG [main] service.CompositeService: Adding service Dispatcher
2018-07-21T05:20:01,529 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.AdminService
2018-07-21T05:20:01,531 DEBUG [main] service.AbstractService: Service: RMActiveServices entered state INITED
2018-07-21T05:20:01,607 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.RMSecretManagerService
2018-07-21T05:20:01,612 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer
2018-07-21T05:20:01,613 DEBUG [main] service.CompositeService: Adding service AMLivelinessMonitor
2018-07-21T05:20:01,613 DEBUG [main] service.CompositeService: Adding service AMLivelinessMonitor
2018-07-21T05:20:01,617 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.rmapp.monitor.RMAppLifetimeMonitor
2018-07-21T05:20:01,635 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager
2018-07-21T05:20:01,667 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.PlacementConstraintManagerService
2018-07-21T05:20:01,757 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore entered state INITED
2018-07-21T05:20:01,757 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:01,758 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStoreEventType for class org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore$ForwardingEventHandler
2018-07-21T05:20:01,771 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.NodesListManagerEventType for class org.apache.hadoop.yarn.server.resourcemanager.NodesListManager
2018-07-21T05:20:01,771 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.NodesListManager
2018-07-21T05:20:01,942 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler
2018-07-21T05:20:01,957 DEBUG [main] service.CompositeService: Adding service SchedulerEventDispatcher
2018-07-21T05:20:01,958 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType for class org.apache.hadoop.yarn.event.EventDispatcher
2018-07-21T05:20:01,959 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType for class org.apache.hadoop.yarn.server.resourcemanager.ResourceManager$ApplicationEventDispatcher
2018-07-21T05:20:01,961 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType for class org.apache.hadoop.yarn.server.resourcemanager.ResourceManager$ApplicationAttemptEventDispatcher
2018-07-21T05:20:01,962 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType for class org.apache.hadoop.yarn.server.resourcemanager.ResourceManager$NodeEventDispatcher
2018-07-21T05:20:01,963 DEBUG [main] service.CompositeService: Adding service NMLivelinessMonitor
2018-07-21T05:20:02,025 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.ResourceTrackerService
2018-07-21T05:20:02,026 INFO [main] impl.MetricsSystemImpl: ResourceManager metrics system started (again)
2018-07-21T05:20:02,026 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.util.JvmPauseMonitor
2018-07-21T05:20:02,041 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService
2018-07-21T05:20:02,060 INFO [main] security.YarnAuthorizationProvider: org.apache.hadoop.yarn.security.ConfiguredYarnAuthorizer is instantiated.
2018-07-21T05:20:02,111 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.RMAppManagerEventType for class org.apache.hadoop.yarn.server.resourcemanager.RMAppManager
2018-07-21T05:20:02,144 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.ClientRMService
2018-07-21T05:20:02,186 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEventType for class org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher
2018-07-21T05:20:02,186 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher
2018-07-21T05:20:02,197 DEBUG [main] service.CompositeService: RMActiveServices: initing services, size=16
2018-07-21T05:20:02,197 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.RMSecretManagerService entered state INITED
2018-07-21T05:20:02,197 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer entered state INITED
2018-07-21T05:20:02,197 DEBUG [main] service.AbstractService: Service: AMLivelinessMonitor entered state INITED
2018-07-21T05:20:02,197 DEBUG [main] service.AbstractService: Service: AMLivelinessMonitor entered state INITED
2018-07-21T05:20:02,197 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.rmapp.monitor.RMAppLifetimeMonitor entered state INITED
2018-07-21T05:20:02,197 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager entered state INITED
2018-07-21T05:20:02,205 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.PlacementConstraintManagerService entered state INITED
2018-07-21T05:20:02,205 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.NodesListManager entered state INITED
2018-07-21T05:20:02,233 INFO [main] util.HostsFileReader: Refreshing hosts (include/exclude) list
2018-07-21T05:20:02,241 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.resourcemanager.NodesListManager: initing services, size=0
2018-07-21T05:20:02,241 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler entered state INITED
2018-07-21T05:20:02,395 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivitiesManager entered state INITED
2018-07-21T05:20:02,590 DEBUG [main] service.AbstractService: Service: SchedulerEventDispatcher entered state INITED
2018-07-21T05:20:02,591 DEBUG [main] service.AbstractService: Service: NMLivelinessMonitor entered state INITED
2018-07-21T05:20:02,591 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.ResourceTrackerService entered state INITED
2018-07-21T05:20:02,606 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:20:02,606 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService entered state INITED
2018-07-21T05:20:02,638 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.ClientRMService entered state INITED
2018-07-21T05:20:02,639 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher entered state INITED
2018-07-21T05:20:02,646 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter
2018-07-21T05:20:02,648 DEBUG [main] service.CompositeService: ResourceManager: initing services, size=3
2018-07-21T05:20:02,653 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:02,653 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.AdminService entered state INITED
2018-07-21T05:20:02,653 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.resourcemanager.AdminService: initing services, size=0
2018-07-21T05:20:02,653 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter entered state INITED
2018-07-21T05:20:02,653 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter: initing services, size=0
2018-07-21T05:20:02,654 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType for class org.apache.hadoop.yarn.server.MiniYARNCluster$1
2018-07-21T05:20:02,655 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper_0 entered state INITED
2018-07-21T05:20:02,657 DEBUG [main] service.AbstractService: Service: NodeManager entered state INITED
2018-07-21T05:20:02,658 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService entered state INITED
2018-07-21T05:20:02,658 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService is started
2018-07-21T05:20:02,791 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.DeletionService
2018-07-21T05:20:02,809 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService
2018-07-21T05:20:02,819 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.NodeResourceMonitorImpl
2018-07-21T05:20:02,935 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService
2018-07-21T05:20:02,941 DEBUG [main] service.CompositeService: Adding service containers-launcher
2018-07-21T05:20:02,963 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler
2018-07-21T05:20:02,968 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices
2018-07-21T05:20:02,974 DEBUG [main] service.CompositeService: Adding service containers-monitor
2018-07-21T05:20:02,976 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl$ContainerEventDispatcher
2018-07-21T05:20:02,977 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl$ApplicationEventDispatcher
2018-07-21T05:20:02,978 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl$LocalizationEventHandlerWrapper
2018-07-21T05:20:02,979 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices
2018-07-21T05:20:02,980 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorImpl
2018-07-21T05:20:02,980 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncher
2018-07-21T05:20:02,981 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerSchedulerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler
2018-07-21T05:20:02,981 DEBUG [main] service.CompositeService: Adding service Dispatcher
2018-07-21T05:20:02,981 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl
2018-07-21T05:20:03,004 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker
2018-07-21T05:20:03,014 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer
2018-07-21T05:20:03,026 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.ContainerManagerEventType for class org.apache.hadoop.yarn.server.MiniYARNCluster$CustomContainerManagerImpl
2018-07-21T05:20:03,027 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.NodeManagerEventType for class org.apache.hadoop.yarn.server.MiniYARNCluster$ShortCircuitedNodeManager
2018-07-21T05:20:03,027 DEBUG [main] service.CompositeService: Adding service Dispatcher
2018-07-21T05:20:03,027 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.util.JvmPauseMonitor
2018-07-21T05:20:03,027 INFO [main] impl.MetricsSystemImpl: NodeManager metrics system started (again)
2018-07-21T05:20:03,027 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl
2018-07-21T05:20:03,027 DEBUG [main] service.CompositeService: NodeManager: initing services, size=9
2018-07-21T05:20:03,027 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.DeletionService entered state INITED
2018-07-21T05:20:03,027 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService entered state INITED
2018-07-21T05:20:03,027 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService
2018-07-21T05:20:03,027 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService: initing services, size=1
2018-07-21T05:20:03,027 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService entered state INITED
2018-07-21T05:20:03,094 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:03,162 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.NodeResourceMonitorImpl entered state INITED
2018-07-21T05:20:03,251 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl entered state INITED
2018-07-21T05:20:03,255 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler
2018-07-21T05:20:03,256 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler
2018-07-21T05:20:03,258 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService
2018-07-21T05:20:03,258 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService
2018-07-21T05:20:03,258 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl: initing services, size=8
2018-07-21T05:20:03,258 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService entered state INITED
2018-07-21T05:20:04,264 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker
2018-07-21T05:20:04,265 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker
2018-07-21T05:20:04,267 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService: initing services, size=1
2018-07-21T05:20:04,268 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker entered state INITED
2018-07-21T05:20:04,268 DEBUG [main] service.AbstractService: Service: containers-launcher entered state INITED
2018-07-21T05:20:04,270 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler entered state INITED
2018-07-21T05:20:04,270 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices entered state INITED
2018-07-21T05:20:04,333 DEBUG [main] service.AbstractService: Service: mapreduce_shuffle entered state INITED
2018-07-21T05:20:04,456 DEBUG [ShuffleHandler Netty Worker #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #0, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,469 DEBUG [ShuffleHandler Netty Worker #1] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #1, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,479 DEBUG [ShuffleHandler Netty Worker #2] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #2, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,495 DEBUG [ShuffleHandler Netty Worker #4] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #4, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,496 DEBUG [ShuffleHandler Netty Worker #3] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #3, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,516 DEBUG [ShuffleHandler Netty Worker #5] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #5, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,532 DEBUG [ShuffleHandler Netty Worker #6] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #6, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,547 DEBUG [ShuffleHandler Netty Worker #7] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #7, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,557 DEBUG [ShuffleHandler Netty Worker #8] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #8, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,563 DEBUG [ShuffleHandler Netty Worker #9] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #9, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,592 DEBUG [ShuffleHandler Netty Worker #10] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #10, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,601 DEBUG [ShuffleHandler Netty Worker #11] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #11, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,606 DEBUG [ShuffleHandler Netty Worker #12] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #12, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,626 DEBUG [ShuffleHandler Netty Worker #13] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #13, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,627 DEBUG [ShuffleHandler Netty Worker #14] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #14, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,640 DEBUG [ShuffleHandler Netty Worker #15] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #15, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,659 DEBUG [ShuffleHandler Netty Boss #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Boss #0, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:04,663 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:04,663 DEBUG [main] service.AbstractService: Service: containers-monitor entered state INITED
2018-07-21T05:20:04,667 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker: initing services, size=0
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:20:04,668 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl entered state INITED
2018-07-21T05:20:04,672 INFO [main] resource.ResourceUtils: Unable to find 'node-resources.xml'.
2018-07-21T05:20:04,680 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:04,680 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper_1 entered state INITED
2018-07-21T05:20:04,714 DEBUG [main] service.AbstractService: Service: NodeManager entered state INITED
2018-07-21T05:20:04,714 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService entered state INITED
2018-07-21T05:20:04,714 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService is started
2018-07-21T05:20:04,744 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.DeletionService
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.NodeResourceMonitorImpl
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service containers-launcher
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices
2018-07-21T05:20:04,745 DEBUG [main] service.CompositeService: Adding service containers-monitor
2018-07-21T05:20:04,745 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl$ContainerEventDispatcher
2018-07-21T05:20:04,745 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl$ApplicationEventDispatcher
2018-07-21T05:20:04,746 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl$LocalizationEventHandlerWrapper
2018-07-21T05:20:04,746 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices
2018-07-21T05:20:04,746 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorImpl
2018-07-21T05:20:04,746 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncher
2018-07-21T05:20:04,746 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerSchedulerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler
2018-07-21T05:20:04,746 DEBUG [main] service.CompositeService: Adding service Dispatcher
2018-07-21T05:20:04,746 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl
2018-07-21T05:20:04,764 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer
2018-07-21T05:20:04,765 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.ContainerManagerEventType for class org.apache.hadoop.yarn.server.MiniYARNCluster$CustomContainerManagerImpl
2018-07-21T05:20:04,765 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.NodeManagerEventType for class org.apache.hadoop.yarn.server.MiniYARNCluster$ShortCircuitedNodeManager
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: Adding service Dispatcher
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.util.JvmPauseMonitor
2018-07-21T05:20:04,765 INFO [main] impl.MetricsSystemImpl: NodeManager metrics system started (again)
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: NodeManager: initing services, size=9
2018-07-21T05:20:04,765 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.DeletionService entered state INITED
2018-07-21T05:20:04,765 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService entered state INITED
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService
2018-07-21T05:20:04,765 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService: initing services, size=1
2018-07-21T05:20:04,765 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService entered state INITED
2018-07-21T05:20:04,856 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:04,905 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.NodeResourceMonitorImpl entered state INITED
2018-07-21T05:20:04,905 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl entered state INITED
2018-07-21T05:20:04,905 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler
2018-07-21T05:20:04,905 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler
2018-07-21T05:20:04,905 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService
2018-07-21T05:20:04,905 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService
2018-07-21T05:20:04,905 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl: initing services, size=8
2018-07-21T05:20:04,905 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService entered state INITED
2018-07-21T05:20:06,061 DEBUG [main] service.CompositeService: Adding service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker
2018-07-21T05:20:06,061 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType for class org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker
2018-07-21T05:20:06,061 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService: initing services, size=1
2018-07-21T05:20:06,061 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker entered state INITED
2018-07-21T05:20:06,061 DEBUG [main] service.AbstractService: Service: containers-launcher entered state INITED
2018-07-21T05:20:06,061 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler entered state INITED
2018-07-21T05:20:06,061 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices entered state INITED
2018-07-21T05:20:06,062 DEBUG [main] service.AbstractService: Service: mapreduce_shuffle entered state INITED
2018-07-21T05:20:06,063 DEBUG [ShuffleHandler Netty Worker #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #0, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,079 DEBUG [ShuffleHandler Netty Worker #1] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #1, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,083 DEBUG [ShuffleHandler Netty Worker #2] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #2, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,083 DEBUG [ShuffleHandler Netty Worker #3] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #3, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,112 DEBUG [ShuffleHandler Netty Worker #4] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #4, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,121 DEBUG [ShuffleHandler Netty Worker #5] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #5, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,134 DEBUG [ShuffleHandler Netty Worker #6] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #6, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,154 DEBUG [ShuffleHandler Netty Worker #7] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #7, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,163 DEBUG [ShuffleHandler Netty Worker #8] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #8, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,170 DEBUG [ShuffleHandler Netty Worker #9] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #9, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,172 DEBUG [ShuffleHandler Netty Worker #10] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #10, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,174 DEBUG [ShuffleHandler Netty Worker #11] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #11, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,174 DEBUG [ShuffleHandler Netty Worker #12] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #12, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,175 DEBUG [ShuffleHandler Netty Worker #13] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #13, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,175 DEBUG [ShuffleHandler Netty Worker #14] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #14, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,175 DEBUG [ShuffleHandler Netty Worker #15] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Worker #15, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,206 DEBUG [ShuffleHandler Netty Boss #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ShuffleHandler Netty Boss #0, runnable type: org.jboss.netty.util.internal.DeadLockProofWorker$1
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Service: containers-monitor entered state INITED
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler entered state INITED
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService entered state INITED
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker entered state INITED
2018-07-21T05:20:06,207 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker: initing services, size=0
2018-07-21T05:20:06,207 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer entered state INITED
2018-07-21T05:20:06,208 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:06,208 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.util.JvmPauseMonitor entered state INITED
2018-07-21T05:20:06,208 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl entered state INITED
2018-07-21T05:20:06,209 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:06,209 DEBUG [main] service.AbstractService: Config has been overridden during init
2018-07-21T05:20:06,209 INFO [main] test.MiniTezCluster: Starting MiniTezCluster
2018-07-21T05:20:06,210 DEBUG [main] service.CompositeService: hive: starting services, size=3
2018-07-21T05:20:06,218 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:06,218 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore is started
2018-07-21T05:20:06,218 DEBUG [main] service.CompositeService: RMActiveServices: starting services, size=16
2018-07-21T05:20:06,274 INFO [main] delegation.AbstractDelegationTokenSecretManager: Updating the current master key for generating delegation tokens
2018-07-21T05:20:06,322 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.RMSecretManagerService is started
2018-07-21T05:20:06,322 INFO [Thread[Thread-314,5,main]] delegation.AbstractDelegationTokenSecretManager: Starting expired delegation token remover thread, tokenRemoverScanInterval=60 min(s)
2018-07-21T05:20:06,343 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer is started
2018-07-21T05:20:06,343 INFO [Thread[Thread-314,5,main]] delegation.AbstractDelegationTokenSecretManager: Updating the current master key for generating delegation tokens
2018-07-21T05:20:06,362 DEBUG [main] service.AbstractService: Service AMLivelinessMonitor is started
2018-07-21T05:20:06,405 DEBUG [main] service.AbstractService: Service AMLivelinessMonitor is started
2018-07-21T05:20:06,418 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.rmapp.monitor.RMAppLifetimeMonitor is started
2018-07-21T05:20:06,418 DEBUG [main] service.AbstractService: Service: Dispatcher entered state INITED
2018-07-21T05:20:06,420 INFO [main] event.AsyncDispatcher: Registering class org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEventType for class org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager$ForwardingEventHandler
2018-07-21T05:20:06,436 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:06,436 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager is started
2018-07-21T05:20:06,436 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.PlacementConstraintManagerService is started
2018-07-21T05:20:06,436 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.resourcemanager.NodesListManager: starting services, size=0
2018-07-21T05:20:06,436 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.NodesListManager is started
2018-07-21T05:20:06,440 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivitiesManager is started
2018-07-21T05:20:06,441 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler is started
2018-07-21T05:20:06,460 DEBUG [main] service.AbstractService: Service SchedulerEventDispatcher is started
2018-07-21T05:20:06,472 DEBUG [main] service.AbstractService: Service NMLivelinessMonitor is started
2018-07-21T05:20:06,630 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.server.api.ResourceTrackerPB to the server
2018-07-21T05:20:07,446 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.ResourceTrackerService is started
2018-07-21T05:20:07,458 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:20:07,557 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@5ec6fede] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:20:07,599 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.api.ApplicationMasterProtocolPB to the server
2018-07-21T05:20:07,997 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService is started
2018-07-21T05:20:08,408 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.api.ApplicationClientProtocolPB to the server
2018-07-21T05:20:08,900 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.ClientRMService is started
2018-07-21T05:20:08,901 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher is started
2018-07-21T05:20:08,901 DEBUG [main] service.AbstractService: Service RMActiveServices is started
2018-07-21T05:20:08,922 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2018-07-21T05:20:08,970 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:20:08,972 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:20:08,978 INFO [main] http.HttpServer2: Added filter RMAuthenticationFilter (class=org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilter) to context cluster
2018-07-21T05:20:08,978 INFO [main] http.HttpServer2: Added filter RMAuthenticationFilter (class=org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilter) to context logs
2018-07-21T05:20:08,978 INFO [main] http.HttpServer2: Added filter RMAuthenticationFilter (class=org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilter) to context static
2018-07-21T05:20:08,978 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context cluster
2018-07-21T05:20:08,978 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:20:08,978 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:20:08,979 INFO [main] http.HttpServer2: adding path spec: /cluster/*
2018-07-21T05:20:08,979 INFO [main] http.HttpServer2: adding path spec: /ws/*
2018-07-21T05:20:08,979 INFO [main] http.HttpServer2: adding path spec: /app/*
2018-07-21T05:20:08,979 INFO [main] http.HttpServer2: adding path spec: /proxy/*
2018-07-21T05:20:10,322 INFO [main] webapp.WebApps: Registered webapp guice modules
2018-07-21T05:20:10,323 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@375084c9{HTTP/1.1,[http/1.1]}{hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0}]
2018-07-21T05:20:10,324 INFO [main] http.HttpServer2: Jetty bound to port 37658
2018-07-21T05:20:10,441 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2018-07-21T05:20:10,490 INFO [main] delegation.AbstractDelegationTokenSecretManager: Updating the current master key for generating delegation tokens
2018-07-21T05:20:10,508 INFO [Thread[Thread-491,5,main]] delegation.AbstractDelegationTokenSecretManager: Starting expired delegation token remover thread, tokenRemoverScanInterval=60 min(s)
2018-07-21T05:20:10,510 INFO [Thread[Thread-491,5,main]] delegation.AbstractDelegationTokenSecretManager: Updating the current master key for generating delegation tokens
2018-07-21T05:20:10,981 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:10,988 DEBUG [main] jndi: supportDeepBinding=false
2018-07-21T05:20:10,988 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@629b780f
2018-07-21T05:20:11,006 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:11,006 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@698d6d30
2018-07-21T05:20:11,011 DEBUG [main] jndi: supportDeepBinding=false
2018-07-21T05:20:11,014 DEBUG [main] jndi: Adding binding with key=comp obj=Reference Class Name: javax.naming.Context Type: parser Content: org.eclipse.jetty.jndi.java.javaNameParser for context=null as comp: javax.naming.Reference:Reference Class Name: javax.naming.Context Type: parser Content: org.eclipse.jetty.jndi.java.javaNameParser
2018-07-21T05:20:11,014 DEBUG [main] jndi: Looking up name="org.glassfish.ejb.container.interceptor_binding_spi"
2018-07-21T05:20:11,016 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:11,016 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@6d4a05f7
2018-07-21T05:20:11,016 DEBUG [main] jndi: Looking up name="comp/BeanManager"
2018-07-21T05:20:11,016 DEBUG [main] jndi: Trying thread context classloader
2018-07-21T05:20:11,017 DEBUG [main] jndi: supportDeepBinding=false
2018-07-21T05:20:11,017 DEBUG [main] jndi: Made context comp for classloader: WebAppClassLoader=cluster@6682e6a5
2018-07-21T05:20:11,017 DEBUG [main] jndi: Looking up name="BeanManager"
2018-07-21T05:20:11,018 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:11,018 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@72fb989b
2018-07-21T05:20:11,018 DEBUG [main] jndi: Looking up name="com.sun.enterprise.container.common.spi.util.InjectionManager"
2018-07-21T05:20:11,048 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53446
2018-07-21T05:20:11,061 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53446
2018-07-21T05:20:11,064 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0001 with negotiated timeout 30000 for client /127.0.0.1:53446
2018-07-21T05:20:11,458 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53448
2018-07-21T05:20:11,461 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53448
2018-07-21T05:20:11,463 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0002 with negotiated timeout 30000 for client /127.0.0.1:53448
2018-07-21T05:20:12,243 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Got user-level KeeperException when processing sessionid:0x164bcc8430d0002 type:create cxid:0x8 zxid:0x6 txntype:-1 reqpath:n/a Error Path:/druid/discovery/druid:broker Error:KeeperErrorCode = NoNode for /druid/discovery/druid:broker
2018-07-21T05:20:12,268 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53450
2018-07-21T05:20:12,272 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53450
2018-07-21T05:20:12,274 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0003 with negotiated timeout 30000 for client /127.0.0.1:53450
2018-07-21T05:20:12,949 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Got user-level KeeperException when processing sessionid:0x164bcc8430d0001 type:create cxid:0x13 zxid:0xe txntype:-1 reqpath:n/a Error Path:/druid/coordinator/_COORDINATOR Error:KeeperErrorCode = NoNode for /druid/coordinator/_COORDINATOR
2018-07-21T05:20:13,096 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Got user-level KeeperException when processing sessionid:0x164bcc8430d0001 type:create cxid:0x1c zxid:0x12 txntype:-1 reqpath:n/a Error Path:/druid/overlord/_OVERLORD Error:KeeperErrorCode = NoNode for /druid/overlord/_OVERLORD
2018-07-21T05:20:13,345 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Got user-level KeeperException when processing sessionid:0x164bcc8430d0001 type:create cxid:0x25 zxid:0x16 txntype:-1 reqpath:n/a Error Path:/druid/discovery/druid:coordinator Error:KeeperErrorCode = NoNode for /druid/discovery/druid:coordinator
2018-07-21T05:20:13,948 WARN [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxn: caught end of stream exception
org.apache.zookeeper.server.ServerCnxn$EndOfStreamException: Unable to read additional data from client sessionid 0x164bcc8430d0002, likely client has closed socket
    at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:228) [zookeeper-3.4.6.jar:3.4.6-1569965]
    at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:208) [zookeeper-3.4.6.jar:3.4.6-1569965]
    at java.lang.Thread.run(Thread.java:745) [?:1.8.0_102]
2018-07-21T05:20:14,215 INFO [main] webapp.WebApps: Web app cluster started at 37658
2018-07-21T05:20:14,216 DEBUG [main] service.CompositeService: ResourceManager: starting services, size=3
2018-07-21T05:20:14,236 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:14,448 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocolPB to the server
2018-07-21T05:20:14,451 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.resourcemanager.AdminService: starting services, size=0
2018-07-21T05:20:14,451 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.AdminService is started
2018-07-21T05:20:14,451 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter: starting services, size=0
2018-07-21T05:20:14,451 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter is started
2018-07-21T05:20:14,451 DEBUG [main] service.AbstractService: Service ResourceManager is started
2018-07-21T05:20:14,452 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.MiniYARNCluster$ResourceManagerWrapper_0 is started
2018-07-21T05:20:14,453 DEBUG [main] service.CompositeService: NodeManager: starting services, size=9
2018-07-21T05:20:14,453 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.DeletionService is started
2018-07-21T05:20:14,453 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService: starting services, size=1
2018-07-21T05:20:14,464 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService is started
2018-07-21T05:20:14,464 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService is started
2018-07-21T05:20:14,464 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.NodeResourceMonitorImpl is started
2018-07-21T05:20:14,614 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.api.ContainerManagementProtocolPB to the server
2018-07-21T05:20:14,655 WARN [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxn: caught end of stream exception
org.apache.zookeeper.server.ServerCnxn$EndOfStreamException: Unable to read additional data from client sessionid 0x164bcc8430d0003, likely client has closed socket
    at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:228) [zookeeper-3.4.6.jar:3.4.6-1569965]
    at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:208) [zookeeper-3.4.6.jar:3.4.6-1569965]
    at java.lang.Thread.run(Thread.java:745) [?:1.8.0_102]
2018-07-21T05:20:14,656 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl: starting services, size=8
2018-07-21T05:20:14,706 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocolPB to the server
2018-07-21T05:20:14,752 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService: starting services, size=1
2018-07-21T05:20:14,758 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker is started
2018-07-21T05:20:14,877 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService is started
2018-07-21T05:20:14,877 DEBUG [main] service.AbstractService: Service containers-launcher is started
2018-07-21T05:20:14,877 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler is started
2018-07-21T05:20:14,905 INFO [main] mapred.IndexCache: IndexCache created with max memory = 10485760
2018-07-21T05:20:14,941 INFO [main] mapred.ShuffleHandler: mapreduce_shuffle listening on port 42650
2018-07-21T05:20:14,942 DEBUG [main] service.AbstractService: Service mapreduce_shuffle is started
2018-07-21T05:20:14,942 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices is started
2018-07-21T05:20:14,942 DEBUG [main] service.AbstractService: Service containers-monitor is started
2018-07-21T05:20:14,953 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:14,953 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler is started
2018-07-21T05:20:14,953 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService is started
2018-07-21T05:20:14,953 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl is started
2018-07-21T05:20:14,953 WARN [main] tracker.NMLogAggregationStatusTracker: Log Aggregation is disabled. So is the LogAggregationStatusTracker.
2018-07-21T05:20:14,953 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker is started
2018-07-21T05:20:14,956 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
2018-07-21T05:20:14,964 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:20:14,965 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:20:14,966 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context node
2018-07-21T05:20:14,966 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:20:14,966 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:20:14,968 INFO [main] http.HttpServer2: Added filter authentication (class=org.apache.hadoop.security.authentication.server.AuthenticationFilter) to context node
2018-07-21T05:20:14,968 INFO [main] http.HttpServer2: Added filter authentication (class=org.apache.hadoop.security.authentication.server.AuthenticationFilter) to context logs
2018-07-21T05:20:14,968 INFO [main] http.HttpServer2: Added filter authentication (class=org.apache.hadoop.security.authentication.server.AuthenticationFilter) to context static
2018-07-21T05:20:14,968 INFO [main] http.HttpServer2: adding path spec: /node/*
2018-07-21T05:20:14,968 INFO [main] http.HttpServer2: adding path spec: /ws/*
2018-07-21T05:20:15,004 INFO [main] webapp.WebApps: Registered webapp guice modules
2018-07-21T05:20:15,005 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@7be3abaa{HTTP/1.1,[http/1.1]}{hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0}]
2018-07-21T05:20:15,005 INFO [main] http.HttpServer2: Jetty bound to port 44572
2018-07-21T05:20:15,009 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
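[Editor's note] The "Got user-level KeeperException ... NoNode" INFO entries above are the server-side trace of a client trying to create a znode (e.g. /druid/discovery/druid:broker) whose parent does not exist yet. Druid's discovery announcements go through Apache Curator (curator-framework 2.12.0 appears on the test classpath further down), and Curator's create-with-parents support first attempts the plain create, which the server rejects with NoNode, then creates the missing parents and retries — which would explain why these entries are logged at INFO and the run proceeds normally. A minimal sketch of that pattern, assuming a Curator client; the instance child path and retry parameters are illustrative, the connect string is the test server's from the log:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;

    public class DiscoveryPathSketch {
        public static void main(String[] args) throws Exception {
            CuratorFramework client = CuratorFrameworkFactory.newClient(
                    "127.0.0.1:63672", new ExponentialBackoffRetry(1000, 3));
            client.start();
            // creatingParentsIfNeeded() tries the plain create first; the
            // server answers with KeeperErrorCode = NoNode (the INFO entries
            // above), after which Curator creates the parents and retries.
            client.create().creatingParentsIfNeeded()
                  .forPath("/druid/discovery/druid:broker/instance-0"); // hypothetical child
            client.close();
        }
    }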
2018-07-21T05:20:15,096 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,096 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@3f1d6a13
2018-07-21T05:20:15,096 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,096 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@5f1f0ee6
2018-07-21T05:20:15,096 DEBUG [main] jndi: Looking up name="org.glassfish.ejb.container.interceptor_binding_spi"
2018-07-21T05:20:15,097 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,097 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@326706d
2018-07-21T05:20:15,097 DEBUG [main] jndi: Looking up name="comp/BeanManager"
2018-07-21T05:20:15,097 DEBUG [main] jndi: Trying thread context classloader
2018-07-21T05:20:15,097 DEBUG [main] jndi: supportDeepBinding=false
2018-07-21T05:20:15,097 DEBUG [main] jndi: Made context comp for classloader: WebAppClassLoader=node@23bd2f6e
2018-07-21T05:20:15,097 DEBUG [main] jndi: Looking up name="BeanManager"
2018-07-21T05:20:15,097 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,097 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@76fc5687
2018-07-21T05:20:15,097 DEBUG [main] jndi: Looking up name="com.sun.enterprise.container.common.spi.util.InjectionManager"
2018-07-21T05:20:15,308 INFO [main] webapp.WebApps: Web app node started at 44572
2018-07-21T05:20:15,309 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer is started
2018-07-21T05:20:15,313 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:15,326 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@74a5bef0] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:20:15,327 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:20:15,343 WARN [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxn: caught end of stream exception
org.apache.zookeeper.server.ServerCnxn$EndOfStreamException: Unable to read additional data from client sessionid 0x164bcc8430d0001, likely client has closed socket
    at org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:228) [zookeeper-3.4.6.jar:3.4.6-1569965]
    at org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:208) [zookeeper-3.4.6.jar:3.4.6-1569965]
    at java.lang.Thread.run(Thread.java:745) [?:1.8.0_102]
2018-07-21T05:20:15,458 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl is started
2018-07-21T05:20:15,473 DEBUG [main] service.AbstractService: Service NodeManager is started
2018-07-21T05:20:15,474 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper_0 is started
2018-07-21T05:20:15,474 DEBUG [main] service.CompositeService: NodeManager: starting services, size=9
2018-07-21T05:20:15,474 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.DeletionService is started
2018-07-21T05:20:15,474 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService: starting services, size=1
2018-07-21T05:20:15,484 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService is started
2018-07-21T05:20:15,484 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService is started
2018-07-21T05:20:15,485 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.NodeResourceMonitorImpl is started
2018-07-21T05:20:15,528 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.api.ContainerManagementProtocolPB to the server
2018-07-21T05:20:15,549 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl: starting services, size=8
2018-07-21T05:20:15,553 INFO [main] pb.RpcServerFactoryPBImpl: Adding protocol org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocolPB to the server
2018-07-21T05:20:15,554 DEBUG [main] service.CompositeService: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService: starting services, size=1
2018-07-21T05:20:15,554 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService$LocalizerTracker is started
2018-07-21T05:20:15,657 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService is started
2018-07-21T05:20:15,657 DEBUG [main] service.AbstractService: Service containers-launcher is started
2018-07-21T05:20:15,657 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler is started
2018-07-21T05:20:15,658 INFO [main] mapred.IndexCache: IndexCache created with max memory = 10485760
2018-07-21T05:20:15,664 INFO [main] mapred.ShuffleHandler: mapreduce_shuffle listening on port 51944
2018-07-21T05:20:15,664 DEBUG [main] service.AbstractService: Service mapreduce_shuffle is started
2018-07-21T05:20:15,664 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices is started
2018-07-21T05:20:15,664 DEBUG [main] service.AbstractService: Service containers-monitor is started
2018-07-21T05:20:15,665 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:15,665 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.NonAggregatingLogHandler is started
2018-07-21T05:20:15,665 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploadService is started
2018-07-21T05:20:15,665 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl is started
2018-07-21T05:20:15,665 WARN [main] tracker.NMLogAggregationStatusTracker: Log Aggregation is disabled. So is the LogAggregationStatusTracker.
2018-07-21T05:20:15,665 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.logaggregation.tracker.NMLogAggregationStatusTracker is started
2018-07-21T05:20:15,669 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
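[Editor's note] The repeated "starting services, size=N" / "Service <name> is started" pairs above come from Hadoop's service framework: ResourceManager and NodeManager are CompositeService instances that register child services during init and start them in registration order, while AbstractService manages the state machine and emits these DEBUG entries. A minimal sketch of that pattern, with hypothetical class and service names mirroring the log:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.service.AbstractService;
    import org.apache.hadoop.service.CompositeService;

    public class ServiceLifecycleSketch {
        // A leaf service; AbstractService provides the NOTINITED -> INITED ->
        // STARTED -> STOPPED lifecycle and the "Service ... is started" logging.
        static class ChildService extends AbstractService {
            ChildService(String name) { super(name); }
        }

        // A parent that registers children in serviceInit(), the way
        // NodeManager does ("NodeManager: starting services, size=9").
        static class ParentService extends CompositeService {
            ParentService() { super("NodeManager-like"); }
            @Override
            protected void serviceInit(Configuration conf) throws Exception {
                addService(new ChildService("Dispatcher"));
                addService(new ChildService("containers-launcher"));
                super.serviceInit(conf);
            }
        }

        public static void main(String[] args) {
            ParentService parent = new ParentService();
            parent.init(new Configuration());
            parent.start(); // children started in registration order
            parent.stop();  // stopped in reverse order
        }
    }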
2018-07-21T05:20:15,670 WARN [main] http.HttpRequestLog: Jetty request log can only be enabled using Log4j
2018-07-21T05:20:15,671 INFO [main] http.HttpServer2: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
2018-07-21T05:20:15,672 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context node
2018-07-21T05:20:15,673 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2018-07-21T05:20:15,673 INFO [main] http.HttpServer2: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2018-07-21T05:20:15,674 INFO [main] http.HttpServer2: Added filter authentication (class=org.apache.hadoop.security.authentication.server.AuthenticationFilter) to context node
2018-07-21T05:20:15,674 INFO [main] http.HttpServer2: Added filter authentication (class=org.apache.hadoop.security.authentication.server.AuthenticationFilter) to context logs
2018-07-21T05:20:15,674 INFO [main] http.HttpServer2: Added filter authentication (class=org.apache.hadoop.security.authentication.server.AuthenticationFilter) to context static
2018-07-21T05:20:15,675 INFO [main] http.HttpServer2: adding path spec: /node/*
2018-07-21T05:20:15,675 INFO [main] http.HttpServer2: adding path spec: /ws/*
2018-07-21T05:20:15,711 INFO [main] webapp.WebApps: Registered webapp guice modules
2018-07-21T05:20:15,714 DEBUG [main] http.HttpServer2: opening listeners: [ServerConnector@6c9151c1{HTTP/1.1,[http/1.1]}{hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0}]
2018-07-21T05:20:15,714 INFO [main] http.HttpServer2: Jetty bound to port 59491
2018-07-21T05:20:15,721 INFO [main] server.AuthenticationFilter: Unable to initialize FileSignerSecretProvider, falling back to use random secrets.
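[Editor's note] The "Added filter ... to context node/logs/static" entries above mean that each webapp context in the NodeManager's embedded Jetty server gets the same servlet filter chain: the quoting 'safety' filter, the static-user filter, and the authentication filter, applied before any servlet handles a request. A minimal sketch of what such a filter looks like, in the spirit of StaticUserWebFilter; the class name is hypothetical and the body is a no-op pass-through:

    import java.io.IOException;
    import javax.servlet.Filter;
    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;
    import javax.servlet.ServletRequest;
    import javax.servlet.ServletResponse;

    public class ExampleWebFilter implements Filter {
        @Override public void init(FilterConfig cfg) {}

        @Override
        public void doFilter(ServletRequest req, ServletResponse res,
                             FilterChain chain)
                throws IOException, ServletException {
            // A real filter would act here, e.g. wrap the request to inject
            // a static user identity or enforce authentication, then either
            // continue the chain or reject the request.
            chain.doFilter(req, res);
        }

        @Override public void destroy() {}
    }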
2018-07-21T05:20:15,814 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,814 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@13aa54f1
2018-07-21T05:20:15,814 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,814 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@5600a278
2018-07-21T05:20:15,814 DEBUG [main] jndi: Looking up name="org.glassfish.ejb.container.interceptor_binding_spi"
2018-07-21T05:20:15,814 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,814 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@549d14fe
2018-07-21T05:20:15,814 DEBUG [main] jndi: Looking up name="comp/BeanManager"
2018-07-21T05:20:15,814 DEBUG [main] jndi: Trying thread context classloader
2018-07-21T05:20:15,814 DEBUG [main] jndi: supportDeepBinding=false
2018-07-21T05:20:15,814 DEBUG [main] jndi: Made context comp for classloader: WebAppClassLoader=node@63d0e8d
2018-07-21T05:20:15,814 DEBUG [main] jndi: Looking up name="BeanManager"
2018-07-21T05:20:15,814 DEBUG [main] jndi: InitialContextFactory.getInitialContext()
2018-07-21T05:20:15,815 DEBUG [main] jndi: Created initial context delegate for local namespace:org.eclipse.jetty.jndi.local.localContextRoot@77d54a41
2018-07-21T05:20:15,815 DEBUG [main] jndi: Looking up name="com.sun.enterprise.container.common.spi.util.InjectionManager"
2018-07-21T05:20:15,998 INFO [main] webapp.WebApps: Web app node started at 59491
2018-07-21T05:20:15,998 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer is started
2018-07-21T05:20:15,998 DEBUG [main] service.AbstractService: Service Dispatcher is started
2018-07-21T05:20:15,999 DEBUG [main] service.AbstractService: Service org.apache.hadoop.util.JvmPauseMonitor is started
2018-07-21T05:20:15,999 INFO [org.apache.hadoop.util.JvmPauseMonitor$Monitor@13d10057] util.JvmPauseMonitor: Starting JVM pause monitor
2018-07-21T05:20:16,001 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl is started
2018-07-21T05:20:16,001 DEBUG [main] service.AbstractService: Service NodeManager is started
2018-07-21T05:20:16,001 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper_1 is started
2018-07-21T05:20:16,086 INFO [main] test.MiniTezCluster: Setting yarn-site.xml via YARN-APP-CP at:
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/hive,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apach
e/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/
home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:
/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/hive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/sup
ercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226
.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hi
veptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hivep
test-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/
netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/h
iveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase
-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/mave
n/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/as
m/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/asm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hi
veptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-provider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.
63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/3
5.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-acid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:20:16,086 DEBUG [main] service.AbstractService: Service hive is started 2018-07-21T05:20:16,135 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:20:16,139 INFO [main] SessionState: Hive Session ID = ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:16,145 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:20:16,248 DEBUG [main] exec.Utilities: Create dirs /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with permission rwx-wx-wx recursive true 2018-07-21T05:20:16,248 DEBUG [main] fs.FileSystem: Looking for FS supporting hdfs 2018-07-21T05:20:16,248 DEBUG [main] fs.FileSystem: looking for configuration option fs.hdfs.impl 2018-07-21T05:20:16,248 DEBUG [main] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:20:16,248 DEBUG [main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem 2018-07-21T05:20:16,249 DEBUG [main] retry.RetryUtils: multipleLinearRandomRetry = null 2018-07-21T05:20:16,252 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx 2018-07-21T05:20:16,255 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest 2018-07-21T05:20:16,261 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir 2018-07-21T05:20:16,270 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:16,277 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:16,280 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db 2018-07-21T05:20:17,195 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:17,218 INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:20:17,256 WARN [main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . 
Setting it to value: ignored 2018-07-21T05:20:17,264 INFO [main] conf.MetastoreConf: Found configuration file file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf/hivemetastore-site.xml 2018-07-21T05:20:17,265 INFO [main] conf.MetastoreConf: Unable to find config file metastore-site.xml 2018-07-21T05:20:17,265 INFO [main] conf.MetastoreConf: Found configuration file null 2018-07-21T05:20:17,266 DEBUG [main] conf.MetastoreConf: Picking up system property hive.root with value /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../ 2018-07-21T05:20:17,281 DEBUG [main] conf.MetastoreConf: Picking up system property hive.version with value 4.0.0-SNAPSHOT 2018-07-21T05:20:17,281 DEBUG [main] conf.MetastoreConf: Picking up system property hive.test.console.log.level with value OFF 2018-07-21T05:20:17,283 DEBUG [main] conf.MetastoreConf: MetastoreConf object: Used hive-site file: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/conf/llap//hive-site.xml Used hivemetastore-site file: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf/hivemetastore-site.xml [per-key configuration dump elided: each entry was printed as "Key: <metastore key> old hive key: <hive.* key> value: <value>", but the angle-bracketed key names were stripped when this log was captured, leaving only detached values] Finished MetastoreConf object. 
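
The MetastoreConf dump above is the metastore configuration being resolved for this test: the llap hive-site.xml layered with the generated hivemetastore-site.xml, every setting echoed next to its legacy hive.* alias. As a minimal sketch of reading one of these resolved settings programmatically (class and method names as they appear in Hive's standalone-metastore module; the probed key is just an example):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;

    public class MetastoreConfProbe {
        public static void main(String[] args) {
            // newMetastoreConf() performs the resolution logged above:
            // defaults, then hive-site.xml, then hivemetastore-site.xml.
            Configuration conf = MetastoreConf.newMetastoreConf();
            // Each ConfVars entry carries both the metastore.* key and its old
            // hive.* alias, which is why the dump prints them in pairs.
            System.out.println(MetastoreConf.getVar(conf, ConfVars.THRIFT_URIS));
        }
    }
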
2018-07-21T05:20:18,061 DEBUG [main] hikari.HikariConfig: HikariPool-1 - configuration: 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: allowPoolSuspension.............false 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: autoCommit......................true 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: catalog.........................none 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: connectionInitSql...............none 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: connectionTestQuery.............none 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: connectionTimeout...............30000 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: dataSource......................none 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: dataSourceClassName.............none 2018-07-21T05:20:18,065 DEBUG [main] hikari.HikariConfig: dataSourceJNDI..................none 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: dataSourceProperties............{password=} 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: driverClassName................."org.apache.derby.jdbc.EmbeddedDriver" 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: healthCheckProperties...........{} 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: healthCheckRegistry.............none 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: idleTimeout.....................600000 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: initializationFailFast..........true 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: initializationFailTimeout.......1 2018-07-21T05:20:18,066 DEBUG [main] hikari.HikariConfig: isolateInternalQueries..........false 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: jdbc4ConnectionTest.............false 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: jdbcUrl........................."jdbc:derby:;databaseName=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db;create=true" 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: leakDetectionThreshold..........0 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: maxLifetime.....................1800000 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: maximumPoolSize.................10 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: metricRegistry..................none 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: metricsTrackerFactory...........none 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: minimumIdle.....................10 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: password........................ 
2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: poolName........................"HikariPool-1" 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: readOnly........................false 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: registerMbeans..................false 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: scheduledExecutor...............none 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: scheduledExecutorService........internal 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: threadFactory...................internal 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: transactionIsolation............default 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: username........................"APP" 2018-07-21T05:20:18,067 DEBUG [main] hikari.HikariConfig: validationTimeout...............5000 2018-07-21T05:20:18,069 INFO [main] hikari.HikariDataSource: HikariPool-1 - Starting... 2018-07-21T05:20:18,079 WARN [main] util.DriverDataSource: Registered driver with driverClassName=org.apache.derby.jdbc.EmbeddedDriver was not found, trying direct instantiation. 2018-07-21T05:20:19,113 INFO [main] pool.PoolBase: HikariPool-1 - Driver does not support get/set network timeout for connections. (Feature not implemented: No details.) 2018-07-21T05:20:19,116 DEBUG [main] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,121 INFO [main] hikari.HikariDataSource: HikariPool-1 - Start completed. 2018-07-21T05:20:19,178 DEBUG [main] hikari.HikariConfig: HikariPool-2 - configuration: 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: allowPoolSuspension.............false 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: autoCommit......................true 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: catalog.........................none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: connectionInitSql...............none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: connectionTestQuery.............none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: connectionTimeout...............30000 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: dataSource......................none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: dataSourceClassName.............none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: dataSourceJNDI..................none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: dataSourceProperties............{password=} 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: driverClassName................."org.apache.derby.jdbc.EmbeddedDriver" 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: healthCheckProperties...........{} 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: healthCheckRegistry.............none 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: idleTimeout.....................600000 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: initializationFailFast..........true 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: initializationFailTimeout.......1 2018-07-21T05:20:19,179 DEBUG [main] hikari.HikariConfig: isolateInternalQueries..........false 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: 
jdbc4ConnectionTest.............false 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: jdbcUrl........................."jdbc:derby:;databaseName=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db;create=true" 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: leakDetectionThreshold..........0 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: maxLifetime.....................1800000 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: maximumPoolSize.................10 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: metricRegistry..................none 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: metricsTrackerFactory...........none 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: minimumIdle.....................10 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: password........................ 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: poolName........................"HikariPool-2" 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: readOnly........................false 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: registerMbeans..................false 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: scheduledExecutor...............none 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: scheduledExecutorService........internal 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: threadFactory...................internal 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: transactionIsolation............default 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: username........................"APP" 2018-07-21T05:20:19,180 DEBUG [main] hikari.HikariConfig: validationTimeout...............5000 2018-07-21T05:20:19,180 INFO [main] hikari.HikariDataSource: HikariPool-2 - Starting... 2018-07-21T05:20:19,180 WARN [main] util.DriverDataSource: Registered driver with driverClassName=org.apache.derby.jdbc.EmbeddedDriver was not found, trying direct instantiation. 2018-07-21T05:20:19,191 INFO [main] pool.PoolBase: HikariPool-2 - Driver does not support get/set network timeout for connections. (Feature not implemented: No details.) 2018-07-21T05:20:19,192 DEBUG [main] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,192 INFO [main] hikari.HikariDataSource: HikariPool-2 - Start completed. 
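
The two HikariConfig dumps above (HikariPool-1 and HikariPool-2) are the connection pools the metastore's persistence layer opens against the embedded Derby database. A sketch of the same settings expressed through HikariCP's public API, with the database path shortened here for readability:

    import com.zaxxer.hikari.HikariConfig;
    import com.zaxxer.hikari.HikariDataSource;

    public class MetastorePoolSketch {
        public static void main(String[] args) throws Exception {
            HikariConfig cfg = new HikariConfig();
            // Values copied from the dumps above; create=true makes Derby
            // create junit_metastore_db on first connect.
            cfg.setJdbcUrl("jdbc:derby:;databaseName=/tmp/junit_metastore_db;create=true");
            cfg.setDriverClassName("org.apache.derby.jdbc.EmbeddedDriver");
            cfg.setUsername("APP");
            cfg.setPassword("");
            cfg.setMaximumPoolSize(10);
            cfg.setMinimumIdle(10);          // pool warms up to 10 idle connections
            cfg.setConnectionTimeout(30000); // ms, as in the dump
            cfg.setIdleTimeout(600000);
            cfg.setMaxLifetime(1800000);
            try (HikariDataSource ds = new HikariDataSource(cfg)) {
                ds.getConnection().close();  // "Start completed." corresponds to this point
            }
        }
    }

minimumIdle=10 is why the "connection adder" lines that follow top each pool up to total=10, and the "was not found, trying direct instantiation" WARN only means the Derby driver had not yet been registered with DriverManager, so Hikari instantiates the named class directly.
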
2018-07-21T05:20:19,221 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=1, active=0, idle=1, waiting=0) 2018-07-21T05:20:19,228 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@877721801 (XID = 170), (SESSIONID = 5), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,237 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1970139890 (XID = 172), (SESSIONID = 7), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,241 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1146258794 (XID = 174), (SESSIONID = 9), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,251 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@356650429 (XID = 176), (SESSIONID = 11), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,255 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@263841248 (XID = 178), (SESSIONID = 13), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,259 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@286061646 (XID = 180), (SESSIONID = 15), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,268 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@201470823 (XID = 183), (SESSIONID = 17), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,271 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1606284223 (XID = 185), (SESSIONID = 19), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,275 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@751628028 (XID = 187), (SESSIONID = 21), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,275 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - After adding stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:20:19,293 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=1, active=1, idle=0, waiting=0) 2018-07-21T05:20:19,305 DEBUG [HikariPool-2 connection adder] pool.HikariPool: 
HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@849388947 (XID = 189), (SESSIONID = 23), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,310 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1939713960 (XID = 191), (SESSIONID = 25), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,314 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1118060173 (XID = 193), (SESSIONID = 27), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,317 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@330453223 (XID = 195), (SESSIONID = 29), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,321 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@2040223006 (XID = 197), (SESSIONID = 31), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,324 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@402436084 (XID = 199), (SESSIONID = 33), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,327 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1821920913 (XID = 201), (SESSIONID = 35), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,331 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1341482251 (XID = 203), (SESSIONID = 37), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,341 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1748549951 (XID = 205), (SESSIONID = 39), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,341 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - After adding stats (total=10, active=1, idle=9, waiting=0) 2018-07-21T05:20:19,914 INFO [main] metastore.ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order" 2018-07-21T05:20:19,954 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,957 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:19,960 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:24,038 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:25,733 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,017 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,193 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,264 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,337 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,740 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,883 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:26,964 DEBUG [main] pool.PoolBase: HikariPool-2 
- Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,044 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,051 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,064 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,066 DEBUG [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:20:27,200 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,206 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,211 DEBUG [main] metastore.MetaStoreSchemaInfoFactory: HIVE_HOME is not set. Using current directory instead 2018-07-21T05:20:27,217 WARN [main] metastore.ObjectStore: Version information not found in metastore. 
metastore.schema.verification is not enabled so recording the schema version 3.1.0 2018-07-21T05:20:27,217 WARN [main] metastore.ObjectStore: setMetaStoreSchemaVersion called but recording version is disabled: version = 3.1.0, comment = Set by MetaStore hiveptest@10.128.0.18 2018-07-21T05:20:27,280 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,290 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,425 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,446 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,459 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,459 WARN [main] metastore.ObjectStore: Failed to get database hive.default, returning NoSuchObjectException 2018-07-21T05:20:27,481 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,497 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,518 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,576 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,600 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), 
(DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,604 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,605 INFO [main] metastore.HiveMetaStore: Added admin role in metastore 2018-07-21T05:20:27,607 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,608 INFO [main] metastore.HiveMetaStore: Added public role in metastore 2018-07-21T05:20:27,682 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,740 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,759 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:27,769 INFO [main] metastore.HiveMetaStore: No user is added in admin role, since config is empty 2018-07-21T05:20:27,782 DEBUG [main] metastore.ThreadPool: ThreadPool initialized 2018-07-21T05:20:27,820 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:27,978 INFO [main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:20:28,016 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:28,020 INFO [main] metastore.HiveMetaStore: 0: get_all_functions 2018-07-21T05:20:28,025 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_all_functions 2018-07-21T05:20:28,175 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:28,201 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:28,201 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:28,210 INFO [main] metadata.HiveMaterializedViewsRegistry: Using dummy materialized views registry 
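
The RetryingMetaStoreClient line above wraps SessionHiveMetaStoreClient in a dynamic proxy so that metastore calls (such as the get_all_functions that follows) are retried on failure, here with retries=1 and delay=1. A simplified, generic sketch of that retry-proxy idea using only the JDK; Hive's real class additionally handles reconnects, UGI re-login, and client lifetimes:

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Proxy;

    public final class RetryProxy {
        @SuppressWarnings("unchecked")
        public static <T> T wrap(Class<T> iface, T target, int retries, long delaySec) {
            InvocationHandler h = (proxy, method, args) -> {
                Throwable last = null;
                for (int attempt = 0; attempt <= retries; attempt++) {
                    try {
                        return method.invoke(target, args);
                    } catch (InvocationTargetException e) {
                        last = e.getCause();        // unwrap the real failure
                        Thread.sleep(delaySec * 1000);
                    }
                }
                throw last;                         // all attempts exhausted
            };
            return (T) Proxy.newProxyInstance(iface.getClassLoader(),
                    new Class<?>[] { iface }, h);
        }
    }
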
2018-07-21T05:20:28,323 INFO [main] control.CoreCliDriver: QtestUtil instance created ElapsedTime(ms)=37439 2018-07-21T05:20:28,346 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0000 2018-07-21T05:20:28,348 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d0000 closed 2018-07-21T05:20:28,350 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@44e19b99 2018-07-21T05:20:28,352 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53523 2018-07-21T05:20:28,352 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53523 2018-07-21T05:20:28,353 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:20:28,354 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0004 with negotiated timeout 40000 for client /127.0.0.1:53523 2018-07-21T05:20:28,355 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:20:28,365 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:28,382 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:20:28,382 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:20:28,390 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:28,391 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:28,392 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:20:28,392 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 
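
The ZooKeeper lines above show QTestSetup recycling its client session against the in-process server on port 63672: the old session is terminated, a new connection is initiated, and the server caps the requested sessionTimeout of 1200000 ms down to a negotiated 40000 ms. A minimal sketch of that client handshake, with the connect string and timeout taken from the log:

    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    public class ZkSessionSketch {
        public static void main(String[] args) throws Exception {
            Watcher watcher = (WatchedEvent e) ->
                    System.out.println("zk event: " + e.getState());
            // The requested timeout is only an upper bound; the server
            // negotiates the effective value (40000 ms in the log above).
            ZooKeeper zk = new ZooKeeper("localhost:63672", 1200000, watcher);
            // ... use the session ...
            zk.close(); // emits the "session termination" / "Session ... closed" pair
        }
    }
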
2018-07-21T05:20:28,392 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:28,392 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:28,392 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:28,392 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:28,394 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:20:28,394 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:20:28,398 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx
2018-07-21T05:20:28,405 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:28,409 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:28,417 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:28,422 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db
2018-07-21T05:20:28,491 INFO [main] tez.TezSessionState: User of session id ee745c13-27f8-4940-a347-c8307a2da8be is hiveptest
2018-07-21T05:20:28,500 INFO [main] tez.TezSessionState: Created new resources: null
2018-07-21T05:20:28,500 INFO [main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:20:28,925 INFO [main] tez.TezSessionState: Computed sha: 92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91 for file: file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar of length: 40.03MB in 419 ms
2018-07-21T05:20:28,926 DEBUG [main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:28,928 INFO [main] tez.DagUtils: Localizing resource because it does not exist: file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar to dest: hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:28,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:20:28,953 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:20:28,955 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741826_1002, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:29,289 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:29,301 INFO [main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:29,311 INFO [main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:20:29,315 INFO [main] tez.TezSessionState: Computed sha: d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f for file: file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar of length: 123.44KB in 1 ms
2018-07-21T05:20:29,315 DEBUG [main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar] is hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:20:29,316 INFO [main] tez.DagUtils: Localizing resource because it does not exist: file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar to dest: hdfs://localhost:35925/user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:20:29,323 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:29,323 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:29,323 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:20:29,323 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:20:29,323 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:20:29,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:20:29,324 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741827_1003, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:20:29,385 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:29,388 INFO [main] tez.DagUtils: Resource modification time: 1532175629385 for hdfs://localhost:35925/user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:20:29,390 INFO [main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:20:29,392 DEBUG [main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:29,395 INFO [main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:29,398 INFO [main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:20:29,400 DEBUG [main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:29,402 INFO [main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:20:29,404 INFO [main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:20:29,410 INFO [main] tez.TezSessionState: Computed sha: 8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b for file: file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar of length: 219.03KB in 2 ms
2018-07-21T05:20:29,410 DEBUG [main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar] is hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:20:29,411 INFO [main] tez.DagUtils: Localizing resource because it does not exist: file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar to dest: hdfs://localhost:35925/user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
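[editor's note] The NetworkTopology chatter above is the mini-cluster's block-placement policy picking three distinct replicas: draw a random datanode, and if it is already in excludeNodes, print "is excluded, continuing." and draw again. A rough sketch of that retry loop (flattened to a list of node strings; the real chooseRandom walks a rack/topology tree):

    import java.util.List;
    import java.util.Random;
    import java.util.Set;

    public class ChooseRandomSketch {
        private static final Random RND = new Random();

        // Pick a random node not in 'excluded', logging skips the way the
        // "Node ... is excluded, continuing." records do. Returns null when
        // every node is excluded ("No node to choose.").
        static String chooseRandom(List<String> nodes, Set<String> excluded) {
            if (nodes.isEmpty() || excluded.containsAll(nodes)) {
                return null;
            }
            while (true) {
                String candidate = nodes.get(RND.nextInt(nodes.size()));
                if (excluded.contains(candidate)) {
                    System.out.println("Node " + candidate + " is excluded, continuing.");
                    continue;
                }
                return candidate;
            }
        }
    }

Each chosen replica is added to excludeNodes before the next draw, which is why the log's pool shrinks from "3 available nodes" to "2 available nodes" between picks and why the same already-picked address can be skipped several times in a row.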
2018-07-21T05:20:29,423 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:20:29,423 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741828_1004, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:20:29,464 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:29,467 INFO [main] tez.DagUtils: Resource modification time: 1532175629464 for hdfs://localhost:35925/user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.mb, mr initial value=10, tez(original):tez.runtime.io.sort.mb=24, tez(final):tez.runtime.io.sort.mb=24
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.read.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.read.timeout=null, tez(final):tez.runtime.shuffle.read.timeout=180000
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead.bytes, mr initial value=4194304, tez(original):tez.runtime.ifile.readahead.bytes=null, tez(final):tez.runtime.ifile.readahead.bytes=4194304
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.shuffle.ssl.enabled, mr initial value=false, tez(original):tez.runtime.shuffle.ssl.enable=null, tez(final):tez.runtime.shuffle.ssl.enable=false
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.map.sort.spill.percent, mr initial value=0.80, tez(original):tez.runtime.sort.spill.percent=null, tez(final):tez.runtime.sort.spill.percent=0.80
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead, mr initial value=true, tez(original):tez.runtime.ifile.readahead=null, tez(final):tez.runtime.ifile.readahead=true
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.merge.percent, mr initial value=0.66, tez(original):tez.runtime.shuffle.merge.percent=null, tez(final):tez.runtime.shuffle.merge.percent=0.66
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.parallelcopies, mr initial value=5, tez(original):tez.runtime.shuffle.parallel.copies=null, tez(final):tez.runtime.shuffle.parallel.copies=5
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.job.reduce.slowstart.completedmaps, mr initial value=0.05, tez(original):tez.shuffle-vertex-manager.min-src-fraction=null, tez(final):tez.shuffle-vertex-manager.min-src-fraction=0.05
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.memory.limit.percent, mr initial value=0.25, tez(original):tez.runtime.shuffle.memory.limit.percent=null, tez(final):tez.runtime.shuffle.memory.limit.percent=0.25
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.factor, mr initial value=10, tez(original):tez.runtime.io.sort.factor=null, tez(final):tez.runtime.io.sort.factor=10
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress, mr initial value=false, tez(original):tez.runtime.compress=null, tez(final):tez.runtime.compress=false
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.connect.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.connect.timeout=20000, tez(final):tez.runtime.shuffle.connect.timeout=20000
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.input.buffer.percent, mr initial value=0.0, tez(original):tez.runtime.task.input.post-merge.buffer.percent=null, tez(final):tez.runtime.task.input.post-merge.buffer.percent=0.0
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress.codec, mr initial value=org.apache.hadoop.io.compress.DefaultCodec, tez(original):tez.runtime.compress.codec=null, tez(final):tez.runtime.compress.codec=org.apache.hadoop.io.compress.DefaultCodec
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.task.merge.progress.records, mr initial value=10000, tez(original):tez.runtime.merge.progress.records=null, tez(final):tez.runtime.merge.progress.records=10000
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):map.sort.class, mr initial value=org.apache.hadoop.util.QuickSort, tez(original):tez.runtime.internal.sorter.class=null, tez(final):tez.runtime.internal.sorter.class=org.apache.hadoop.util.QuickSort
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.input.buffer.percent, mr initial value=0.70, tez(original):tez.runtime.shuffle.fetch.buffer.percent=0.4, tez(final):tez.runtime.shuffle.fetch.buffer.percent=0.4
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.job.maxtaskfailures.per.tracker, mr initial value=3, tez(original):tez.am.maxtaskfailures.per.node=null, tez(final):tez.am.maxtaskfailures.per.node=3
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.task.timeout, mr initial value=600000, tez(original):tez.task.timeout-ms=null, tez(final):tez.task.timeout-ms=600000
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.node-blacklisting.enable, mr initial value=false, tez(original):tez.am.node-blacklisting.enabled=false, tez(final):tez.am.node-blacklisting.enabled=false
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.job.counters.max, mr initial value=120, tez(original):tez.counters.max=1024, tez(final):tez.counters.max=1024
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):mapreduce.job.queuename, mr initial value=default, tez(original):tez.queue.name=default, tez(final):tez.queue.name=default
2018-07-21T05:20:29,517 INFO [main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.task.listener.thread-count, mr initial value=30, tez(original):tez.am.task.listener.thread-count=null, tez(final):tez.am.task.listener.thread-count=30
2018-07-21T05:20:29,583 INFO [main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=ee745c13-27f8-4940-a347-c8307a2da8be, clientType=HIVECLI]
2018-07-21T05:20:29,590 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:29,591 INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:29,591 WARN [main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:29,591 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:29,591 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:29,591 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:29,602 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:29,607 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:29,607 DEBUG [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:29,607 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:29,607 INFO [main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:29,613 DEBUG [main] session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl
2018-07-21T05:20:29,629 DEBUG [main] tez.TezSessionState: Setting Tez Session access for sessionId=ee745c13-27f8-4940-a347-c8307a2da8be with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:20:29,653 INFO [main] client.TezClient: Tez Client Version: [ component=tez-api, version=0.9.1, revision=23b58b2b996eee255aab1a045412de00677ca2f1, SCM-URL=scm:git:https://git-wip-us.apache.org/repos/asf/tez.git, buildTime=2017-12-13T00:06:01Z ]
2018-07-21T05:20:29,653 INFO [main] tez.TezSessionState: Opening new Tez Session (id: ee745c13-27f8-4940-a347-c8307a2da8be, scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be)
2018-07-21T05:20:29,667 DEBUG [main] service.AbstractService: Service: org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state INITED
2018-07-21T05:20:29,710 INFO [main] client.RMProxy: Connecting to ResourceManager at hive-ptest-slaves-a56.c.gcp-hive-upstream.internal/10.128.0.18:59658
2018-07-21T05:20:29,888 DEBUG [main] service.AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl is started
2018-07-21T05:20:29,891 INFO [main] client.TezClient: Session mode. Starting session.
2018-07-21T05:20:29,898 INFO [main] client.TezClientUtils: Using tez.lib.uris value from configuration: hdfs://localhost:35925/user/hiveptest/target/hive-tmpDir/TezAppJar.jar
2018-07-21T05:20:29,898 INFO [main] client.TezClientUtils: Using tez.lib.uris.classpath value from configuration: null
2018-07-21T05:20:30,027 INFO [main] client.TezClient: Tez system stage directory hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001 doesn't exist and is created
2018-07-21T05:20:30,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:20:30,091 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:20:30,095 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741829_1005, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/tez-conf.pb
2018-07-21T05:20:30,172 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/tez-conf.pb is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
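[editor's note] Each "Config: mr(unset):..." record above documents one MapReduce-to-Tez property translation with a simple precedence: if the Tez key was set explicitly, tez(original) wins; otherwise the MR property's effective value is copied over as tez(final). That is why tez.runtime.io.sort.mb stays 24 while the unset tez.runtime.shuffle.read.timeout inherits 180000. A minimal sketch of that rule (translate below is a hypothetical helper, not Hive's actual method):

    import java.util.Map;

    public class MrToTezConfSketch {
        // tez(final) = tez(original) if set, else the MR value (or its
        // default when the MR key itself is unset, the "mr(unset)" case).
        static String translate(Map<String, String> conf,
                                String mrKey, String mrDefault, String tezKey) {
            String tezOriginal = conf.get(tezKey);
            String mrValue = conf.getOrDefault(mrKey, mrDefault);
            String tezFinal = (tezOriginal != null) ? tezOriginal : mrValue;
            conf.put(tezKey, tezFinal);
            System.out.printf("Config: mr(unset):%s, mr initial value=%s, "
                    + "tez(original):%s=%s, tez(final):%s=%s%n",
                    mrKey, mrValue, tezKey, tezOriginal, tezKey, tezFinal);
            return tezFinal;
        }
    }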
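[editor's note] The RetryingMetaStoreClient record above shows the metastore client wrapped in a retrying proxy (here retries=1, delay=1, lifetime=0). The general shape of such a wrapper is a java.lang.reflect dynamic proxy that re-invokes a failed call after a delay; a stripped-down sketch under that assumption, not Hive's implementation:

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Proxy;

    public class RetryProxySketch {
        // Wrap 'target' so every interface call is retried up to 'retries'
        // extra times, sleeping 'delaySeconds' between attempts.
        @SuppressWarnings("unchecked")
        static <T> T wrap(Class<T> iface, T target, int retries, int delaySeconds) {
            InvocationHandler handler = (proxy, method, args) -> {
                Throwable last = null;
                for (int attempt = 0; attempt <= retries; attempt++) {
                    try {
                        return method.invoke(target, args);
                    } catch (InvocationTargetException e) {
                        last = e.getCause();
                        if (attempt < retries) {
                            Thread.sleep(delaySeconds * 1000L);
                        }
                    }
                }
                throw last; // all attempts failed; surface the last cause
            };
            return (T) Proxy.newProxyInstance(
                    iface.getClassLoader(), new Class<?>[]{iface}, handler);
        }
    }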
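[editor's note] A few records below, the ContainerLocalizer's fs.FileSystem lines trace Hadoop's scheme resolution: the fs.<scheme>.impl configuration option is checked first, and when it is unset the implementation is located through the service-loader registry ("Looking in service filesystems"), which is how this run ends up with DistributedFileSystem for hdfs and the harness's ProxyLocalFileSystem for file. From the caller's side this is all behind FileSystem.get; a small usage sketch (RawLocalFileSystem is used purely as a stand-in override):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FsResolutionExample {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();

            // No fs.file.impl set: resolution falls through to the service
            // registry, as in "Looking in service filesystems" below.
            FileSystem byRegistry = FileSystem.get(URI.create("file:///"), conf);
            System.out.println(byRegistry.getClass().getName());

            // An explicit fs.<scheme>.impl takes precedence over the registry.
            // newInstance bypasses the FileSystem cache so the override shows.
            conf.set("fs.file.impl", "org.apache.hadoop.fs.RawLocalFileSystem");
            FileSystem byConf = FileSystem.newInstance(URI.create("file:///"), conf);
            System.out.println(byConf.getClass().getName());
        }
    }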
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:20:30,233 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:20:30,234 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741830_1006, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/tez.session.local-resources.pb
2018-07-21T05:20:30,303 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/tez.session.local-resources.pb is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:30,755 INFO [main] impl.YarnClientImpl: Submitted application application_1532175606211_0001
2018-07-21T05:20:30,759 INFO [main] client.TezClient: The url to track the Tez Session: http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0001/
2018-07-21T05:20:31,222 DEBUG [ApplicationMasterLauncher #0] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:20:31,227 DEBUG [ApplicationMasterLauncher #0] security.LlapServerSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:20:31,260 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:20:31,552 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0001
2018-07-21T05:20:31,607 DEBUG [PublicLocalizer #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: PublicLocalizer #0, runnable type: java.util.concurrent.ExecutorCompletionService$QueueingFuture
2018-07-21T05:20:31,883 DEBUG [PublicLocalizer #0] concurrent.ExecutorHelper: afterExecute in thread: PublicLocalizer #0, runnable type: java.util.concurrent.ExecutorCompletionService$QueueingFuture
2018-07-21T05:20:31,999 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:20:31,999 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:20:31,999 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:20:31,999 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:20:32,000 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:20:32,004 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB
2018-07-21T05:20:32,010 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file
2018-07-21T05:20:32,011 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl
2018-07-21T05:20:32,011 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:20:32,011 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
2018-07-21T05:20:32,607 DEBUG [ContainersLauncher #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #0, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:20:33,642 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:20:33,646 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:20:39,995 INFO [Socket Reader #1 for port 60399] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:20:40,179 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53604
2018-07-21T05:20:40,182 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53604
2018-07-21T05:20:40,184 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0005 with negotiated timeout 40000 for client /127.0.0.1:53604
2018-07-21T05:20:40,300 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Got user-level KeeperException when processing sessionid:0x164bcc8430d0005 type:create cxid:0x3 zxid:0x21 txntype:-1 reqpath:n/a Error Path:/tez-am-unsecure/user-hiveptest/llap/workers Error:KeeperErrorCode = NoNode for /tez-am-unsecure/user-hiveptest/llap/workers
2018-07-21T05:20:40,464 DEBUG [main] CliDriver: CliDriver inited with classpath
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf:
2018-07-21T05:20:40,465 DEBUG [main] session.SessionState: SessionState user: null
2018-07-21T05:20:40,466 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used
2018-07-21T05:20:40,466 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:40,468 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:20:40,468 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:20:40,468 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:40,468 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,469 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:40,469 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:40,469 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:40,469 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:40,469 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,469 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:40,470 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:20:40,470 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:20:40,474 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx
2018-07-21T05:20:40,480 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:40,483 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:40,492 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:40,494 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db
2018-07-21T05:20:40,495 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,496 INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:40,496 WARN [main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:40,497 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,497 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,497 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,501 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,502 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,502 DEBUG [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:40,502 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,502 INFO [main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:40,502 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,502 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:20:40,503 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:20:40,514 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,514 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,515 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,515 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.*
2018-07-21T05:20:40,515 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.*
2018-07-21T05:20:40,534 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,534 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,535 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,535 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:20:40,535 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:20:40,537 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,537 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,537 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,537 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.*
2018-07-21T05:20:40,537 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.*
2018-07-21T05:20:40,538 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,539 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,548 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,555 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,555 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,555 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,555 INFO [main] metastore.HiveMetaStore: 0: get_functions: db=@hive#default pat=.*
2018-07-21T05:20:40,555 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_functions: db=@hive#default pat=.*
2018-07-21T05:20:40,579 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:40,579 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:40,580 INFO [main] QTestUtil: Cleanup (/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/scripts/q_test_cleanup_druid.sql): DROP TABLE IF EXISTS alltypesorc; DROP TABLE IF EXISTS druid_table;
2018-07-21T05:20:40,580 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:40,580 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:40,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:40,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:20:40,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:40,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:40,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:40,664 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052040_bc0baf5f-9070-4303-9235-27c7b620c107): DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:40,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:40,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:41,608 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:20:41,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,660 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:41,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,665 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:41,665 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:41,677 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:41,678 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:20:41,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,705 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:null, properties:null)
2018-07-21T05:20:41,714 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,714 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:20:41,714 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getTables_(String, String, )=23, getAllDatabases_()=14, flushCache_()=0, listRoleNames_()=7, getFunctions_(String, String, )=24}
2018-07-21T05:20:41,715 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052040_bc0baf5f-9070-4303-9235-27c7b620c107); Time taken: 1.114 seconds
2018-07-21T05:20:41,716 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:20:41,717 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:41,717 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,717 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052040_bc0baf5f-9070-4303-9235-27c7b620c107): DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:41,718 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,719 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:41,719 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: DROPTABLE
2018-07-21T05:20:41,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,725 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,739 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:DDL] in serial mode
2018-07-21T05:20:41,740 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,740 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:41,740 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:41,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:41,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,742 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Table default.alltypesorc not found: hive.default.alltypesorc table not found
2018-07-21T05:20:41,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,768 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:41,768 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:41,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:41,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,771 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:41,771 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: DROPTABLE
2018-07-21T05:20:41,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,774 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0
2018-07-21T05:20:41,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,774 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:20:41,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, dropTable_(String, String, boolean, boolean, boolean, )=2}
2018-07-21T05:20:41,775 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052040_bc0baf5f-9070-4303-9235-27c7b620c107); Time taken: 0.057 seconds
2018-07-21T05:20:41,775 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:20:41,775 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,775 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:41,775 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query DROP TABLE IF EXISTS alltypesorc
2018-07-21T05:20:41,775 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 1.178 seconds
2018-07-21T05:20:41,775 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,775 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:41,775 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,775 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:41,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:20:41,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,777 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052041_1cdd1f05-e28f-42fa-8bd7-c16ba8408e46): DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:20:41,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:41,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table
2018-07-21T05:20:41,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table
2018-07-21T05:20:41,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:20:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:null, properties:null)
2018-07-21T05:20:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:20:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=1, flushCache_()=0}
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052041_1cdd1f05-e28f-42fa-8bd7-c16ba8408e46); Time taken: 0.005 seconds
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,782 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052041_1cdd1f05-e28f-42fa-8bd7-c16ba8408e46): DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: DROPTABLE
2018-07-21T05:20:41,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:DDL] in serial mode
2018-07-21T05:20:41,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table
2018-07-21T05:20:41,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table
2018-07-21T05:20:41,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:41,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,785 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Table default.druid_table not found: hive.default.druid_table table not found
2018-07-21T05:20:41,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,785 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table
2018-07-21T05:20:41,785 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table
2018-07-21T05:20:41,786 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,787 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: DROPTABLE
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:20:41,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, dropTable_(String, String, boolean, boolean, boolean, )=2}
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052041_1cdd1f05-e28f-42fa-8bd7-c16ba8408e46); Time taken: 0.005 seconds
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:20:41,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:41,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query DROP TABLE IF EXISTS druid_table
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.011 seconds
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:41,789 INFO [main] control.CoreCliDriver: Initialization cleanup done. ElapsedTime(ms)=13465
2018-07-21T05:20:41,790 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,791 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:41,801 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files
2018-07-21T05:20:41,802 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,802 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:41,802 INFO [main] QTestUtil: Initial setup (/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/scripts/q_test_druid_init.sql):
2018-07-21T05:20:41,802 INFO [main] QTestUtil: Result from cliDriver.processLine in createSources=0
2018-07-21T05:20:41,802 INFO [main] control.CoreCliDriver: Initialization createSources done. ElapsedTime(ms)=11
ElapsedTime(ms)=11 2018-07-21T05:20:41,820 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0004 2018-07-21T05:20:41,822 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d0004 closed 2018-07-21T05:20:41,824 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@3bc82aa3 2018-07-21T05:20:41,826 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53614 2018-07-21T05:20:41,826 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53614 2018-07-21T05:20:41,827 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:20:41,828 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:20:41,828 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0006 with negotiated timeout 40000 for client /127.0.0.1:53614 2018-07-21T05:20:41,828 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:41,830 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:20:41,831 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:20:41,831 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:41,831 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:41,831 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:20:41,831 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 
2018-07-21T05:20:41,831 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:41,831 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:41,831 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:20:41,831 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:41,833 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:20:41,833 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:20:41,835 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx
2018-07-21T05:20:41,853 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:41,856 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,864 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:41,867 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db
2018-07-21T05:20:41,868 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false
2018-07-21T05:20:41,868 DEBUG [main] CliDriver: CliDriver inited with classpath
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf:
2018-07-21T05:20:41,868 INFO [main] control.CoreCliDriver: PerTestSetup done. ElapsedTime(ms)=60
2018-07-21T05:20:41,869 INFO [main] control.CoreCliDriver: Begin query: druidmini_dynamic_partition.q
2018-07-21T05:20:41,932 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0006
2018-07-21T05:20:41,933 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d0006 closed
2018-07-21T05:20:41,935 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@497ccf3
2018-07-21T05:20:41,936 DEBUG [main] session.SessionState: SessionState user: null
2018-07-21T05:20:41,937 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53615
2018-07-21T05:20:41,943 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53615
2018-07-21T05:20:41,944 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR; /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used
2018-07-21T05:20:41,944 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0007 with negotiated timeout 40000 for client /127.0.0.1:53615
2018-07-21T05:20:41,947 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:20:41,951 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:20:41,951 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:20:41,951 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
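The harness reports each setup phase as "<phase> done. ElapsedTime(ms)=N": PerTestSetup above took 60 ms, and the initialization phases earlier took 13465 ms and 11 ms. A small sketch, once more an assumed stdlib helper rather than anything shipped with the harness, for tallying those phase timings across a whole run:

    import re
    from collections import defaultdict

    # Matches harness lines like
    # 'control.CoreCliDriver: PerTestSetup done. ElapsedTime(ms)=60'.
    ELAPSED = re.compile(r"control\.CoreCliDriver: (.+?) done\. ElapsedTime\(ms\)=(\d+)")

    def phase_times(log_lines):
        # Hypothetical helper: sums reported milliseconds per phase name.
        totals = defaultdict(int)
        for line in log_lines:
            if m := ELAPSED.search(line):
                totals[m.group(1)] += int(m.group(2))
        return dict(totals)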
2018-07-21T05:20:41,951 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:41,957 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:20:41,957 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:20:41,960 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx 2018-07-21T05:20:41,967 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:41,970 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:41,977 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:41,980 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db 2018-07-21T05:20:41,980 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false 2018-07-21T05:20:41,980 DEBUG [main] CliDriver: CliDriver inited with classpath 
[classpath identical to the listing above] 2018-07-21T05:20:41,981 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:41,981 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:20:41,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:41,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 2018-07-21T05:20:41,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:41,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:41,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC 2018-07-21T05:20:41,983 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052041_8abfcbb8-a307-4ffa-81b1-76526d5c3b72): CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC 2018-07-21T05:20:41,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:41,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC 2018-07-21T05:20:41,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:20:41,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:41,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:41,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:41,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:20:41,992 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null .
Setting it to value: ignored 2018-07-21T05:20:41,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:41,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:41,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:41,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:41,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:41,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:20:41,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:41,999 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:20:41,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:41,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,054 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:20:42,070 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:20:42,070 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=ee745c13-27f8-4940-a347-c8307a2da8be, clientType=HIVECLI] 2018-07-21T05:20:42,070 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Mestastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook 2018-07-21T05:20:42,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. 
db = org.apache.hadoop.hive.ql.metadata.Hive@7ff395fa, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:20:42,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:42,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,071 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:20:42,071 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:20:42,071 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:20:42,071 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:20:42,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:42,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,073 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:20:42,073 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:20:42,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,080 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:20:42,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,081 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:20:42,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl 2018-07-21T05:20:42,095 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.alltypesorc position=13 2018-07-21T05:20:42,125 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:20:42,125 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,125 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:42,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:20:42,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:42,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:20:42,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 
2018-07-21T05:20:42,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:20:42,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:20:42,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:20:42,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,182 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:20:42,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:20:42,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:20:42,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,191 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:null, properties:null)
2018-07-21T05:20:42,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:20:42,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {}
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052041_8abfcbb8-a307-4ffa-81b1-76526d5c3b72); Time taken: 0.209 seconds
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:42,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052041_8abfcbb8-a307-4ffa-81b1-76526d5c3b72): CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC
2018-07-21T05:20:42,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default
2018-07-21T05:20:42,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@alltypesorc
2018-07-21T05:20:42,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:DDL] in serial mode
2018-07-21T05:20:42,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook to org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl
2018-07-21T05:20:42,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@2c226db0, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:20:42,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:42,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:42,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:42,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:42,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:42,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:42,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Found class for org.apache.hadoop.hive.ql.io.orc.OrcSerde
2018-07-21T05:20:42,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.alltypesorc on null
2018-07-21T05:20:42,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,253 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:42,254 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:42,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:42,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,259 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:42,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,259 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: create_table: Table(tableName:alltypesorc, dbName:default, owner:hive_test_user, createTime:1532175642, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:ctimestamp1, type:timestamp, comment:null), FieldSchema(name:ctimestamp2, type:timestamp, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{totalSize=0, numRows=0, rawDataSize=0, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}}, numFiles=0, bucketing_version=2, numFilesErasureCoded=0}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER)
2018-07-21T05:20:42,259 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=create_table: Table(tableName:alltypesorc, dbName:default, owner:hive_test_user, createTime:1532175642, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:ctimestamp1, type:timestamp, comment:null), FieldSchema(name:ctimestamp2, type:timestamp, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:org.apache.hadoop.hive.ql.io.orc.OrcInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.ql.io.orc.OrcSerde, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{totalSize=0, numRows=0, rawDataSize=0, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true","COLUMN_STATS":{"cbigint":"true","cboolean1":"true","cboolean2":"true","cdouble":"true","cfloat":"true","cint":"true","csmallint":"true","cstring1":"true","cstring2":"true","ctimestamp1":"true","ctimestamp2":"true","ctinyint":"true"}}, numFiles=0, bucketing_version=2, numFilesErasureCoded=0}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER)
2018-07-21T05:20:42,262 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:20:42,269 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:20:42,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,491 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@alltypesorc) Type=TABLE WriteType=DDL_NO_LOCK because WriteEntity(default@alltypesorc) Type=TABLE WriteType=DDL_NO_LOCK is present
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,513 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC
2018-07-21T05:20:42,513 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: CREATETABLE
2018-07-21T05:20:42,513 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: database:default
2018-07-21T05:20:42,513 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@alltypesorc
2018-07-21T05:20:42,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {createTable_(Table, )=254}
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052041_8abfcbb8-a307-4ffa-81b1-76526d5c3b72); Time taken: 0.322 seconds
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:42,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE TABLE alltypesorc( ctinyint TINYINT, csmallint SMALLINT, cint INT, cbigint BIGINT, cfloat FLOAT, cdouble DOUBLE, cstring1 STRING, cstring2 STRING, ctimestamp1 TIMESTAMP, ctimestamp2 TIMESTAMP, cboolean1 BOOLEAN, cboolean2 BOOLEAN) STORED AS ORC
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.531 seconds
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:42,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:42,514 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:42,514 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:42,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:20:42,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc" OVERWRITE INTO TABLE alltypesorc
2018-07-21T05:20:42,520 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052042_ae8c482a-d4c9-46d2-bf7d-b1c249106f0a): LOAD DATA LOCAL INPATH "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files/alltypesorc" OVERWRITE INTO TABLE alltypesorc
2018-07-21T05:20:42,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: LOAD DATA LOCAL INPATH "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files/alltypesorc" OVERWRITE INTO TABLE alltypesorc
2018-07-21T05:20:42,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:20:42,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook
2018-07-21T05:20:42,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@27476b5f, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:20:42,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:42,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:42,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:42,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:42,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:42,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:42,529 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:42,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.LoadSemanticAnalyzer: file@null@/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc
2018-07-21T05:20:42,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,544 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:42,544 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:42,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:42,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,551 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:42,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,551 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:42,552 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:42,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,739 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,831 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.OrcCodecPool: Got brand-new codec ZLIB
2018-07-21T05:20:42,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.OrcCodecPool: Got recycled codec
2018-07-21T05:20:42,875 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:20:42,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.LoadSemanticAnalyzer: validation start
2018-07-21T05:20:42,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:null, properties:null)
2018-07-21T05:20:42,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:20:42,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getTable_(String, String, )=188}
2018-07-21T05:20:42,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052042_ae8c482a-d4c9-46d2-bf7d-b1c249106f0a); Time taken: 0.36 seconds
2018-07-21T05:20:42,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:20:42,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:42,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052042_ae8c482a-d4c9-46d2-bf7d-b1c249106f0a): LOAD DATA LOCAL INPATH "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files/alltypesorc" OVERWRITE INTO TABLE alltypesorc
2018-07-21T05:20:42,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,877 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: LOAD DATA LOCAL INPATH "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files/alltypesorc" OVERWRITE INTO TABLE alltypesorc
2018-07-21T05:20:42,877 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: LOAD
2018-07-21T05:20:42,877 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc
2018-07-21T05:20:42,877 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@alltypesorc
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,877 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@55cd41f7, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:42,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,877 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:42,878 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:42,878 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:42,878 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:42,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:42,878 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Loading data to table default.alltypesorc from file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc
2018-07-21T05:20:42,879 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,880 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:42,880 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:42,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:42,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,888 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:42,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,888 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:42,888 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:42,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.OrcCodecPool: Got recycled codec
2018-07-21T05:20:42,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.OrcCodecPool: Got recycled codec
2018-07-21T05:20:42,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,912 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:42,912 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:42,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: moving file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc to hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc (replace = REPLACE_ALL)
2018-07-21T05:20:42,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,929 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:20:42,929 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:20:42,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:20:42,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:42,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ReplChangeManager: Repl policy is not set for database
2018-07-21T05:20:42,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Deleting old paths for replace in hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc and old path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:20:42,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: The source path is /build/ql/test/data/warehouse/alltypesorc/ and the destination path is /build/ql/test/data/warehouse/alltypesorc/
2018-07-21T05:20:42,933 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:20:42,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: The source path is /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc/ and the destination path is /build/ql/test/data/warehouse/alltypesorc/alltypesorc/
2018-07-21T05:20:42,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: The source path is /build/ql/test/data/warehouse/alltypesorc/alltypesorc/ and the destination path is /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc/
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:20:42,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:20:42,942 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741831_1007, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /build/ql/test/data/warehouse/alltypesorc/alltypesorc
2018-07-21T05:20:42,978 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/alltypesorc/alltypesorc is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:20:42,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:42,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:42,979 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.alltypesorc newtbl=alltypesorc
2018-07-21T05:20:42,980 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.alltypesorc newtbl=alltypesorc
2018-07-21T05:20:42,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,027 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@alltypesorc) Type=TABLE WriteType=INSERT_OVERWRITE because WriteEntity(default@alltypesorc) Type=TABLE WriteType=INSERT_OVERWRITE is present
2018-07-21T05:20:43,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.MoveTask: No locks to release because Hive concurrency support is not enabled
2018-07-21T05:20:43,035 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:STATS] in serial mode
2018-07-21T05:20:43,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@42d8b670, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:20:43,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:43,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,036 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:43,036 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:43,036 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:43,036 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:43,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:43,036 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Executing stats task
2018-07-21T05:20:43,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:43,038 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:43,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,041 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:43,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,042 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:43,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,042 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:43,043 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:43,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,062 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,062 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.alltypesorc newtbl=alltypesorc
2018-07-21T05:20:43,062 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.alltypesorc newtbl=alltypesorc
2018-07-21T05:20:43,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,116 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Table default.alltypesorc stats: [numFiles=1, numRows=0, totalSize=295616, rawDataSize=0, numFilesErasureCoded=0]
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: LOAD DATA LOCAL INPATH "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files/alltypesorc" OVERWRITE INTO TABLE alltypesorc
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: LOAD
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/data/files/alltypesorc
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@alltypesorc
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=54, getTable_(String, String, )=13}
2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052042_ae8c482a-d4c9-46d2-bf7d-b1c249106f0a); Time taken: 0.241 seconds
2018-07-21T05:20:43,117 INFO
[ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:20:43,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query LOAD DATA LOCAL INPATH "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../..//data/files/alltypesorc" OVERWRITE INTO TABLE alltypesorc 2018-07-21T05:20:43,118 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.601 seconds 2018-07-21T05:20:43,118 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:43,118 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:20:43,118 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:43,118 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: ANALYZE TABLE alltypesorc COMPUTE STATISTICS 2018-07-21T05:20:43,120 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052043_adcc64ac-5037-4792-a2a6-5669a3754831): ANALYZE TABLE alltypesorc COMPUTE STATISTICS 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: ANALYZE TABLE alltypesorc COMPUTE STATISTICS 2018-07-21T05:20:43,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:20:43,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,126 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:20:43,126 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Invoking analyze on original query 2018-07-21T05:20:43,127 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Starting Semantic Analysis 2018-07-21T05:20:43,133 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Completed phase 1 of Semantic Analysis 2018-07-21T05:20:43,133 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Get metadata for source tables 2018-07-21T05:20:43,133 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,133 INFO 
[ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:20:43,133 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:20:43,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:43,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,151 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:20:43,151 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:20:43,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:43,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,167 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Get metadata for subqueries 2018-07-21T05:20:43,167 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Get metadata for destination tables 2018-07-21T05:20:43,167 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Completed getting MetaData in Semantic Analysis 2018-07-21T05:20:43,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_120_6993144514321346362-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:20:43,179 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_120_6993144514321346362-1 2018-07-21T05:20:43,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_120_6993144514321346362-1/-ext-10000 2018-07-21T05:20:43,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Table Plan for alltypesorc TS[0] 2018-07-21T05:20:43,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Body Plan for Query Block null 2018-07-21T05:20:43,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Plan for Query Block null 2018-07-21T05:20:43,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Before logical optimization TS[0] 2018-07-21T05:20:43,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
log.PerfLogger: 2018-07-21T05:20:43,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=24 2018-07-21T05:20:43,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,324 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,324 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:43,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,355 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0) 2018-07-21T05:20:43,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.PredicatePushDown: After PPD: TS[0] 2018-07-21T05:20:43,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:43,356 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:43,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,388 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,388 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,408 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: After logical optimization TS[0] 2018-07-21T05:20:43,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:43,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,468 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 512 2018-07-21T05:20:43,470 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:20:43,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:20:43,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:43,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:43,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 1 Data size: 2956160 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0] 2018-07-21T05:20:43,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: 
[0] STATS-TS[0] (alltypesorc): numRows: 1 dataSize: 2956160 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {} 2018-07-21T05:20:43,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:20:43,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0 2018-07-21T05:20:43,538 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: 
timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:43,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events. 2018-07-21T05:20:43,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization 2018-07-21T05:20:43,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled. 2018-07-21T05:20:43,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of StatsTask 2018-07-21T05:20:43,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger 2018-07-21T05:20:43,673 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapClusterStateForCompile: Creating cluster info for hiveptest:localhost 2018-07-21T05:20:43,676 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none 2018-07-21T05:20:43,676 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled. 2018-07-21T05:20:43,679 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:43,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.StatsWork - no FileSinkOperation can be present. 
executionId=hive_2018-07-21_05-20-43_120_6993144514321346362
2018-07-21T05:20:43,684 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Completed plan generation
2018-07-21T05:20:43,684 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:20:43,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: validation start
2018-07-21T05:20:43,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,684 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:alltypesorc.ctinyint, type:tinyint, comment:null), FieldSchema(name:alltypesorc.csmallint, type:smallint, comment:null), FieldSchema(name:alltypesorc.cint, type:int, comment:null), FieldSchema(name:alltypesorc.cbigint, type:bigint, comment:null), FieldSchema(name:alltypesorc.cfloat, type:float, comment:null), FieldSchema(name:alltypesorc.cdouble, type:double, comment:null), FieldSchema(name:alltypesorc.cstring1, type:string, comment:null), FieldSchema(name:alltypesorc.cstring2, type:string, comment:null), FieldSchema(name:alltypesorc.ctimestamp1, type:timestamp, comment:null), FieldSchema(name:alltypesorc.ctimestamp2, type:timestamp, comment:null), FieldSchema(name:alltypesorc.cboolean1, type:boolean, comment:null), FieldSchema(name:alltypesorc.cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:20:43,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,684 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:20:43,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getTable_(String, String, )=33, flushCache_()=0, getTableColumnStatistics_(String, String, List, )=27}
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052043_adcc64ac-5037-4792-a2a6-5669a3754831); Time taken: 0.564 seconds
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:43,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052043_adcc64ac-5037-4792-a2a6-5669a3754831): ANALYZE TABLE alltypesorc COMPUTE STATISTICS
2018-07-21T05:20:43,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: ANALYZE TABLE alltypesorc COMPUTE STATISTICS
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:20:43,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@alltypesorc
2018-07-21T05:20:43,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:STATS] in serial mode
2018-07-21T05:20:43,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@2fb3c642, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:20:43,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:43,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:43,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Executing stats (no job) task
2018-07-21T05:20:43,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.StatsTask: Initialized threadpool for stats computation with 10 threads
2018-07-21T05:20:43,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Stats collection waiting for threadpool to shutdown..
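Up to this point the run has compiled ANALYZE TABLE alltypesorc COMPUTE STATISTICS into a single Stage-2:STATS task; the StatsNoJobTask thread that follows opens the ORC file directly and takes numRows/totalSize/rawDataSize from the file's own metadata, so no MapReduce/Tez job is launched for an ORC table. A minimal sketch of the same flow for replaying it by hand against the fixture table; DESCRIBE FORMATTED is our addition for inspecting the result, not part of the test script:

    -- Footer-only basic stats for an ORC table; this compiles to a STATS
    -- task, not a cluster job:
    ANALYZE TABLE alltypesorc COMPUTE STATISTICS;

    -- The stats task writes numFiles/numRows/totalSize/rawDataSize back
    -- through an alter_table call; they appear under Table Parameters here:
    DESCRIBE FORMATTED alltypesorc;
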
2018-07-21T05:20:43,693 DEBUG [StatsNoJobTask-Thread-0] FileOperations: Aggregating stats for hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:20:43,721 DEBUG [StatsNoJobTask-Thread-0] FileOperations: Computing stats for HdfsNamedFileStatus{path=hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/alltypesorc; isDirectory=false; length=295616; replication=3; blocksize=134217728; modification_time=1532175642978; access_time=1532175642938; owner=hiveptest; group=supergroup; permission=rw-r--r--; isSymlink=false; hasAcl=false; isEncrypted=false; isErasureCoded=false}
2018-07-21T05:20:43,721 DEBUG [StatsNoJobTask-Thread-0] exec.Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead
2018-07-21T05:20:43,739 DEBUG [StatsNoJobTask-Thread-0] impl.OrcCodecPool: Got recycled codec
2018-07-21T05:20:43,739 DEBUG [StatsNoJobTask-Thread-0] impl.OrcCodecPool: Got recycled codec
2018-07-21T05:20:43,747 DEBUG [StatsNoJobTask-Thread-0] orc.OrcInputFormat: No ORC pushdown predicate - no column names
2018-07-21T05:20:43,747 INFO [StatsNoJobTask-Thread-0] orc.ReaderImpl: Reading ORC rows from hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/alltypesorc with {include: null, offset: 0, length: 0}
2018-07-21T05:20:43,761 INFO [StatsNoJobTask-Thread-0] impl.RecordReaderImpl: Reader schema not provided -- using file schema struct
2018-07-21T05:20:43,774 DEBUG [StatsNoJobTask-Thread-0] impl.OrcCodecPool: Got recycled codec
2018-07-21T05:20:43,802 DEBUG [StatsNoJobTask-Thread-0] impl.ReaderImpl: Unknown primitive category: STRUCT
2018-07-21T05:20:43,803 DEBUG [StatsNoJobTask-Thread-0] stats.BasicStatsNoJobTask: Table default.alltypesorc stats: [numFiles=1, numRows=12288, totalSize=295616, rawDataSize=2907994, numFilesErasureCoded=0]
2018-07-21T05:20:43,803 INFO [StatsNoJobTask-Thread-0] stats.BasicStatsNoJobTask: Table default.alltypesorc stats: [numFiles=1, numRows=12288, totalSize=295616, rawDataSize=2907994, numFilesErasureCoded=0]
2018-07-21T05:20:43,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Stats collection threadpool shutdown successful.
2018-07-21T05:20:43,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Collectors.size(): [default@alltypesorc#PTable]
2018-07-21T05:20:43,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Updating stats for: default.alltypesorc
2018-07-21T05:20:43,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,815 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:43,815 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:20:43,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,819 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,821 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:43,821 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,821 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:43,821 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,821 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.alltypesorc newtbl=alltypesorc
2018-07-21T05:20:43,822 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.alltypesorc newtbl=alltypesorc
2018-07-21T05:20:43,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Updated stats for default.alltypesorc.
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Updated stats for: default.alltypesorc
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,859 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: ANALYZE TABLE alltypesorc COMPUTE STATISTICS
2018-07-21T05:20:43,859 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY
2018-07-21T05:20:43,859 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc
2018-07-21T05:20:43,859 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@alltypesorc
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,908 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 1
2018-07-21T05:20:43,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,908 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:20:43,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=38}
2018-07-21T05:20:43,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052043_adcc64ac-5037-4792-a2a6-5669a3754831); Time taken: 0.223 seconds
2018-07-21T05:20:43,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:20:43,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:43,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query ANALYZE TABLE alltypesorc COMPUTE STATISTICS
2018-07-21T05:20:43,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_120_6993144514321346362-1
2018-07-21T05:20:43,913 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.789 seconds
2018-07-21T05:20:43,913 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:43,913 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:43,913 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:43,913 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:43,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:20:43,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2
2018-07-21T05:20:43,915 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af): ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2
2018-07-21T05:20:43,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2
2018-07-21T05:20:43,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:20:43,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:43,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,919 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:43,920 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,920 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:43,920 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:43,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: select compute_stats(`ctinyint`, 'hll') , compute_stats(`csmallint`, 'hll') , compute_stats(`cint`, 'hll') , compute_stats(`cbigint`, 'hll') , compute_stats(`cfloat`, 'hll') , compute_stats(`cdouble`, 'hll') , compute_stats(`cstring1`, 'hll') , compute_stats(`cstring2`, 'hll') , compute_stats(`ctimestamp1`, 'hll') , compute_stats(`ctimestamp2`, 'hll') , compute_stats(`cboolean1`, 'hll') , compute_stats(`cboolean2`, 'hll') from `default`.`alltypesorc`
2018-07-21T05:20:43,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: select compute_stats(`ctinyint`, 'hll') , compute_stats(`csmallint`, 'hll') , compute_stats(`cint`, 'hll') , compute_stats(`cbigint`, 'hll') , compute_stats(`cfloat`, 'hll') , compute_stats(`cdouble`, 'hll') , compute_stats(`cstring1`, 'hll') , compute_stats(`cstring2`, 'hll') , compute_stats(`ctimestamp1`, 'hll') , compute_stats(`ctimestamp2`, 'hll') , compute_stats(`cboolean1`, 'hll') , compute_stats(`cboolean2`, 'hll') from `default`.`alltypesorc`
2018-07-21T05:20:43,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:20:43,952 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Invoking analyze on rewritten query
2018-07-21T05:20:43,952 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Starting Semantic Analysis
2018-07-21T05:20:43,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:43,955 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Completed phase 1 of Semantic Analysis
2018-07-21T05:20:43,955 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Get metadata for source tables
2018-07-21T05:20:43,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,956 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:43,956 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:43,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:43,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:43,971 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Get metadata for subqueries
2018-07-21T05:20:43,971 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Get metadata for destination tables
2018-07-21T05:20:43,991 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1
2018-07-21T05:20:43,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Completed getting MetaData in Semantic Analysis
2018-07-21T05:20:43,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:20:43,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1
2018-07-21T05:20:43,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10001
2018-07-21T05:20:43,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:20:44,003 INFO [SessionTracker] server.ZooKeeperServer: Expiring session 0x164bcc8430d0002, timeout of 30000ms exceeded
2018-07-21T05:20:44,005 INFO [SessionTracker] server.ZooKeeperServer: Expiring session 0x164bcc8430d0003, timeout of 30000ms exceeded
2018-07-21T05:20:44,007 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0002
2018-07-21T05:20:44,007 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0003
2018-07-21T05:20:44,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:44,063 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:44,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:44,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:44,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
2018-07-21T05:20:44,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats
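The two Substitution lines above are the key step: ColumnStatsSemanticAnalyzer does not execute ANALYZE ... FOR COLUMNS directly. It rewrites the statement into an ordinary aggregation query, one compute_stats(col, 'hll') call per requested column, and compiles that instead ('hll' selects the HyperLogLog-based NDV estimator). Side by side, with the rewritten query reproduced from the log:

    -- What the q-file issues:
    ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS
    ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;

    -- What the semantic analyzer actually compiles and runs:
    select compute_stats(`ctinyint`, 'hll'), compute_stats(`csmallint`, 'hll'),
           compute_stats(`cint`, 'hll'), compute_stats(`cbigint`, 'hll'),
           compute_stats(`cfloat`, 'hll'), compute_stats(`cdouble`, 'hll'),
           compute_stats(`cstring1`, 'hll'), compute_stats(`cstring2`, 'hll'),
           compute_stats(`ctimestamp1`, 'hll'), compute_stats(`ctimestamp2`, 'hll'),
           compute_stats(`cboolean1`, 'hll'), compute_stats(`cboolean2`, 'hll')
    from `default`.`alltypesorc`;

Each compute_stats aggregate returns a struct of statistics for its column (the struct-typed _colN slots in the row schemas dumped below), which the follow-on stats task persists through the metastore.
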
2018-07-21T05:20:44,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats 2018-07-21T05:20:44,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats 2018-07-21T05:20:44,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats 2018-07-21T05:20:44,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats 2018-07-21T05:20:44,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats 2018-07-21T05:20:44,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: compute_stats 2018-07-21T05:20:44,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)(RAW__DATA__SIZE,RAW__DATA__SIZE: bigint)} after GB {((tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col0: struct)((tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col1: struct)((tok_function compute_stats (tok_table_or_col cint) 'hll'),_col2: struct)((tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col3: struct)((tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col4: struct)((tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col5: struct)((tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col6: struct)((tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col7: struct)((tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col8: struct)((tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col9: struct)((tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col10: struct)((tok_function compute_stats (tok_table_or_col cboolean2) 'hll'),_col11: struct)} 2018-07-21T05:20:44,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: tree: (tok_select (tok_selexpr (tok_function compute_stats (tok_table_or_col ctinyint) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col csmallint) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cint) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cbigint) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cfloat) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cdouble) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cstring1) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cstring2) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cboolean1) 'hll')) (tok_selexpr (tok_function compute_stats (tok_table_or_col cboolean2) 'hll'))) 2018-07-21T05:20:44,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
parse.ColumnStatsSemanticAnalyzer: genSelectPlan: input = {((tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col0: struct)((tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col1: struct)((tok_function compute_stats (tok_table_or_col cint) 'hll'),_col2: struct)((tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col3: struct)((tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col4: struct)((tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col5: struct)((tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col6: struct)((tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col7: struct)((tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col8: struct)((tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col9: struct)((tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col10: struct)((tok_function compute_stats (tok_table_or_col cboolean2) 'hll'),_col11: struct)} starRr = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)(RAW__DATA__SIZE,RAW__DATA__SIZE: bigint)} 2018-07-21T05:20:44,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Select Plan row schema: null{(_c0,_col0: struct)(_c1,_col1: struct)(_c2,_col2: struct)(_c3,_col3: struct)(_c4,_col4: struct)(_c5,_col5: struct)(_c6,_col6: struct)(_c7,_col7: struct)(_c8,_col8: struct)(_c9,_col9: struct)(_c10,_col10: struct)(_c11,_col11: struct)} 2018-07-21T05:20:44,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Select Plan for clause: insclause-0 2018-07-21T05:20:44,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000 2018-07-21T05:20:44,100 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1 2018-07-21T05:20:44,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Set stats collection dir : 
hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10003
2018-07-21T05:20:44,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created FileSink Plan for clause: insclause-0 dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000 row schema: null{(_c0,_col0: struct)(_c1,_col1: struct)(_c2,_col2: struct)(_c3,_col3: struct)(_c4,_col4: struct)(_c5,_col5: struct)(_c6,_col6: struct)(_c7,_col7: struct)(_c8,_col8: struct)(_c9,_col9: struct)(_c10,_col10: struct)(_c11,_col11: struct)}
2018-07-21T05:20:44,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Body Plan for Query Block null
2018-07-21T05:20:44,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Created Plan for Query Block null
2018-07-21T05:20:44,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Before logical optimization TS[0]-SEL[1]-GBY[2]-RS[3]-GBY[4]-SEL[5]-FS[6]
2018-07-21T05:20:44,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=9
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size)
2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with
rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:44,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[1] 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cfloat] Column[cdouble] Column[cstring1] Column[cstring2] Column[ctimestamp1] Column[ctimestamp2] Column[cboolean1] Column[cboolean2] Column[BLOCK__OFFSET__INSIDE__FILE] Column[INPUT__FILE__NAME] Column[ROW__ID] Column[RAW__DATA__SIZE]) 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_1 {} 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[2] 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),VALUE._col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),VALUE._col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),VALUE._col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),VALUE._col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),VALUE._col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),VALUE._col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 
'hll'),VALUE._col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),VALUE._col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),VALUE._col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),VALUE._col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),VALUE._col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_2 {} 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3] 2018-07-21T05:20:44,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {} 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[4] 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: struct|{null}_c0,_col1: struct|{null}_c1,_col2: struct|{null}_c2,_col3: struct|{null}_c3,_col4: struct|{null}_c4,_col5: struct|{null}_c5,_col6: struct|{null}_c6,_col7: struct|{null}_c7,_col8: struct|{null}_c8,_col9: struct|{null}_c9,_col10: struct|{null}_c10,_col11: struct|{null}_c11) 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_4 {} 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] Column[_col11]) 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: struct|{},_col1: struct|{},_col2: struct|{},_col3: struct|{},_col4: struct|{},_col5: struct|{},_col6: struct|{},_col7: struct|{},_col8: struct|{},_col9: struct|{},_col10: struct|{},_col11: struct|{}) 2018-07-21T05:20:44,173 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6] 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(6) 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(5) 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for GBY(4) 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for RS(3) 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for GBY(2) 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(1) 2018-07-21T05:20:44,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0) 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.PredicatePushDown: After PPD: TS[0]-SEL[1]-GBY[2]-RS[3]-GBY[4]-SEL[5]-FS[6] 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:44,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[1] 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cfloat] Column[cdouble] Column[cstring1] Column[cstring2] Column[ctimestamp1] Column[ctimestamp2] Column[cboolean1] Column[cboolean2] Column[BLOCK__OFFSET__INSIDE__FILE] Column[INPUT__FILE__NAME] Column[ROW__ID] Column[RAW__DATA__SIZE]) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_1 {} 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[2] 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),VALUE._col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),VALUE._col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),VALUE._col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),VALUE._col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),VALUE._col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),VALUE._col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),VALUE._col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),VALUE._col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),VALUE._col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),VALUE._col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),VALUE._col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_2 {} 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3] 2018-07-21T05:20:44,174 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {} 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[4] 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: struct|{null}_c0,_col1: struct|{null}_c1,_col2: struct|{null}_c2,_col3: struct|{null}_c3,_col4: struct|{null}_c4,_col5: struct|{null}_c5,_col6: struct|{null}_c6,_col7: struct|{null}_c7,_col8: struct|{null}_c8,_col9: struct|{null}_c9,_col10: struct|{null}_c10,_col11: struct|{null}_c11) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_4 {} 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] Column[_col11]) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: struct|{},_col1: struct|{},_col2: struct|{},_col3: struct|{},_col4: struct|{},_col5: struct|{},_col6: struct|{},_col7: struct|{},_col8: struct|{},_col9: struct|{},_col10: struct|{},_col11: struct|{}) 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6] 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[1] 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cfloat] Column[cdouble] Column[cstring1] Column[cstring2] Column[ctimestamp1] Column[ctimestamp2] Column[cboolean1] Column[cboolean2] Column[BLOCK__OFFSET__INSIDE__FILE] Column[INPUT__FILE__NAME] Column[ROW__ID] Column[RAW__DATA__SIZE]) 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 
'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_1 {} 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[2] 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),VALUE._col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),VALUE._col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),VALUE._col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),VALUE._col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),VALUE._col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),VALUE._col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),VALUE._col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),VALUE._col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),VALUE._col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),VALUE._col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),VALUE._col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_2 {} 2018-07-21T05:20:44,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3] 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {} 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[4] 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: struct|{null}_c0,_col1: struct|{null}_c1,_col2: struct|{null}_c2,_col3: struct|{null}_c3,_col4: 
struct|{null}_c4,_col5: struct|{null}_c5,_col6: struct|{null}_c6,_col7: struct|{null}_c7,_col8: struct|{null}_c8,_col9: struct|{null}_c9,_col10: struct|{null}_c10,_col11: struct|{null}_c11) 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_4 {} 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] Column[_col11]) 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: struct|{},_col1: struct|{},_col2: struct|{},_col3: struct|{},_col4: struct|{},_col5: struct|{},_col6: struct|{},_col7: struct|{},_col8: struct|{},_col9: struct|{},_col10: struct|{},_col11: struct|{}) 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6] 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 3 key:[] 2018-07-21T05:20:44,181 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 3 oldColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col11=Column[_col11], VALUE._col1=Column[_col1], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]} 2018-07-21T05:20:44,181 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 3 newColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col11=Column[_col11], VALUE._col1=Column[_col1], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]} 2018-07-21T05:20:44,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 
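Once the column pruning above and the identity-project removal logged just below settle the final operator pipeline, the stats annotation pass re-reads any existing column statistics from the metastore. The MetaStoreDirectSql probe logged a few entries further down runs against the embedded Derby metastore; with the bind values visible in the get_table_statistics_req audit entry (catalog hive, database default, table alltypesorc), it would bind roughly as follows (the IN list is elided in the log, so it is left elided here as well):

  -- Illustrative binding of the logged TAB_COL_STATS query; the three bind
  -- values are inferred from the audit entry, not shown bound in the log.
  SELECT "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE",
         "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE",
         "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR",
         "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED"
  FROM "TAB_COL_STATS"
  WHERE "CAT_NAME" = 'hive'
    AND "DB_NAME" = 'default'
    AND "TABLE_NAME" = 'alltypesorc'
    AND "COLUMN_NAME" IN (...);  -- column list elided in the log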
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.IdentityProjectRemover: Identity project remover optimization removed : SEL[5]
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: After logical optimization TS[0]-SEL[1]-GBY[2]-RS[3]-GBY[4]-FS[6]
2018-07-21T05:20:44,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size)
2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: 
timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[1] 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cfloat] Column[cdouble] Column[cstring1] Column[cstring2] Column[ctimestamp1] Column[ctimestamp2] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_1 {} 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[2] 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),VALUE._col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),VALUE._col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),VALUE._col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),VALUE._col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),VALUE._col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),VALUE._col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),VALUE._col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),VALUE._col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),VALUE._col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),VALUE._col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),VALUE._col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_2 {} 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be 
main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3] 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {} 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[4] 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: struct|{},_col1: struct|{},_col2: struct|{},_col3: struct|{},_col4: struct|{},_col5: struct|{},_col6: struct|{},_col7: struct|{},_col8: struct|{},_col9: struct|{},_col10: struct|{},_col11: struct|{}) 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_4 {} 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6] 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 512 2018-07-21T05:20:44,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:44,188 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:20:44,188 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:20:44,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 15.169491ms + 0.091322ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? 
and "COLUMN_NAME" in (...)] 2018-07-21T05:20:44,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:44,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:44,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 5697400 Basic stats: COMPLETE Column stats: NONE) on TS[0] 2018-07-21T05:20:44,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 5697400 basicStatsState: COMPLETE colStatsState: NONE colStats: {cint= colName: cint colType: int countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -9223372036854775808 max: 9223372036854775807 ] isPrimaryKey: false isEstimated: true, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 2457 numNulls: 614 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 2457 numNulls: 614 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, csmallint= colName: csmallint colType: smallint countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -32768 max: 32767 ] isPrimaryKey: false isEstimated: true, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 614 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cdouble= colName: cdouble colType: double countDistincts: 2457 numNulls: 614 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: 4.9E-324 max: 1.7976931348623157E308 ] isPrimaryKey: false isEstimated: true, cstring2= colName: cstring2 colType: string countDistincts: 2457 numNulls: 614 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 614 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cfloat= colName: cfloat colType: float countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: 1.4E-45 max: 3.4028235E38 ] isPrimaryKey: false isEstimated: true, ctinyint= colName: ctinyint colType: tinyint countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -128 max: 127 ] isPrimaryKey: false isEstimated: true, cstring1= colName: cstring1 colType: string countDistincts: 2457 numNulls: 614 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cbigint= colName: cbigint colType: bigint countDistincts: 2457 numNulls: 614 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147483648 max: 2147483647 ] isPrimaryKey: false isEstimated: true} 2018-07-21T05:20:44,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 5697400 Basic stats: COMPLETE Column stats: NONE) on SEL[1] 2018-07-21T05:20:44,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [1] STATS-SEL[1]: numRows: 12288 dataSize: 5697400 basicStatsState: COMPLETE colStatsState: NONE colStats: {cint= colName: cint colType: int 
countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -9223372036854775808 max: 9223372036854775807 ] isPrimaryKey: false isEstimated: true, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 2457 numNulls: 614 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 2457 numNulls: 614 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, csmallint= colName: csmallint colType: smallint countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -32768 max: 32767 ] isPrimaryKey: false isEstimated: true, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 614 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cdouble= colName: cdouble colType: double countDistincts: 2457 numNulls: 614 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: 4.9E-324 max: 1.7976931348623157E308 ] isPrimaryKey: false isEstimated: true, cstring2= colName: cstring2 colType: string countDistincts: 2457 numNulls: 614 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 614 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cfloat= colName: cfloat colType: float countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: 1.4E-45 max: 3.4028235E38 ] isPrimaryKey: false isEstimated: true, ctinyint= colName: ctinyint colType: tinyint countDistincts: 2457 numNulls: 614 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -128 max: 127 ] isPrimaryKey: false isEstimated: true, cstring1= colName: cstring1 colType: string countDistincts: 2457 numNulls: 614 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cbigint= colName: cbigint colType: bigint countDistincts: 2457 numNulls: 614 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147483648 max: 2147483647 ] isPrimaryKey: false isEstimated: true} 2018-07-21T05:20:44,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: STATS-GBY[2]: inputSize: 5697400 maxSplitSize: 256000000 parallelism: 1 containsGroupingSet: false sizeOfGroupingSet: 1 2018-07-21T05:20:44,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [Case 1] STATS-GBY[2]: cardinality: 12288 2018-07-21T05:20:44,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.GroupByOperator: Setting stats (Num rows: 1 Data size: 5304 Basic stats: COMPLETE Column stats: NONE) on GBY[2] 2018-07-21T05:20:44,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-GBY[2]: numRows: 1 dataSize: 5304 basicStatsState: COMPLETE colStatsState: NONE colStats: {cint= colName: cint colType: int countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -9223372036854775808 max: 9223372036854775807 ] isPrimaryKey: false isEstimated: true, _col11= colName: _col11 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 
Range: [ min: -32768 max: 32767 ] isPrimaryKey: false isEstimated: true, cboolean2= colName: cboolean2 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cboolean1= colName: cboolean1 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cfloat= colName: cfloat colType: float countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: 1.4E-45 max: 3.4028235E38 ] isPrimaryKey: false isEstimated: true, _col0= colName: _col0 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147483648 max: 2147483647 ] isPrimaryKey: false isEstimated: true, _col9= colName: _col9 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cdouble= colName: cdouble colType: double countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: 4.9E-324 max: 1.7976931348623157E308 ] isPrimaryKey: false isEstimated: true, cstring2= colName: cstring2 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctinyint= colName: ctinyint colType: tinyint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -128 max: 127 ] isPrimaryKey: false isEstimated: true, cstring1= colName: cstring1 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, _col2= colName: _col2 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false} 2018-07-21T05:20:44,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 1 Data size: 5304 Basic stats: COMPLETE Column stats: NONE) on RS[3] 2018-07-21T05:20:44,252 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[3]: numRows: 1 dataSize: 5304 basicStatsState: COMPLETE colStatsState: NONE colStats: {cint= colName: cint colType: int countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -9223372036854775808 max: 9223372036854775807 ] isPrimaryKey: false isEstimated: true, _col11= colName: _col11 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -32768 max: 32767 ] isPrimaryKey: false isEstimated: true, cboolean2= colName: cboolean2 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cboolean1= colName: cboolean1 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cfloat= colName: cfloat colType: float countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: 1.4E-45 max: 3.4028235E38 ] isPrimaryKey: false isEstimated: true, _col0= colName: _col0 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147483648 max: 2147483647 ] isPrimaryKey: false isEstimated: true, _col9= colName: _col9 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cdouble= colName: cdouble colType: double countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: 4.9E-324 max: 1.7976931348623157E308 ] isPrimaryKey: false isEstimated: true, cstring2= colName: cstring2 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctinyint= colName: ctinyint colType: tinyint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -128 max: 127 ] isPrimaryKey: false isEstimated: true, cstring1= colName: cstring1 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, _col2= colName: _col2 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: struct 
countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 424.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false} 2018-07-21T05:20:44,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: STATS-GBY[4]: inputSize: 1 maxSplitSize: 256000000 parallelism: 1 containsGroupingSet: false sizeOfGroupingSet: 1 2018-07-21T05:20:44,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [Case 7] STATS-GBY[4]: cardinality: 0 2018-07-21T05:20:44,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: STATS-GBY[4]: Equals 0 in number of rows. 0 rows will be set to 1 2018-07-21T05:20:44,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.GroupByOperator: Setting stats (Num rows: 1 Data size: 5432 Basic stats: COMPLETE Column stats: NONE) on GBY[4] 2018-07-21T05:20:44,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-GBY[4]: numRows: 1 dataSize: 5432 basicStatsState: COMPLETE colStatsState: NONE colStats: {cint= colName: cint colType: int countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -9223372036854775808 max: 9223372036854775807 ] isPrimaryKey: false isEstimated: true, _col11= colName: _col11 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -32768 max: 32767 ] isPrimaryKey: false isEstimated: true, cboolean2= colName: cboolean2 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cboolean1= colName: cboolean1 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cfloat= colName: cfloat colType: float countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: 1.4E-45 max: 3.4028235E38 ] isPrimaryKey: false isEstimated: true, _col0= colName: _col0 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147483648 max: 2147483647 ] isPrimaryKey: false isEstimated: true, _col9= colName: _col9 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cdouble= 
colName: cdouble colType: double countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: 4.9E-324 max: 1.7976931348623157E308 ] isPrimaryKey: false isEstimated: true, cstring2= colName: cstring2 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctinyint= colName: ctinyint colType: tinyint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -128 max: 127 ] isPrimaryKey: false isEstimated: true, cstring1= colName: cstring1 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, _col2= colName: _col2 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false} 2018-07-21T05:20:44,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[6]: numRows: 1 dataSize: 5432 basicStatsState: COMPLETE colStatsState: NONE colStats: {cint= colName: cint colType: int countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -9223372036854775808 max: 9223372036854775807 ] isPrimaryKey: false isEstimated: true, _col11= colName: _col11 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 272.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -32768 max: 32767 ] isPrimaryKey: false isEstimated: true, cboolean2= colName: cboolean2 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cboolean1= colName: cboolean1 colType: boolean countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 6144 numFalses: 6144 isPrimaryKey: false isEstimated: true, cfloat= colName: cfloat colType: float countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: 1.4E-45 max: 3.4028235E38 ] isPrimaryKey: false isEstimated: true, _col0= colName: _col0 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 
numTrues: 0 numFalses: 0 Range: [ min: -2147483648 max: 2147483647 ] isPrimaryKey: false isEstimated: true, _col9= colName: _col9 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 1 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, cdouble= colName: cdouble colType: double countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: 4.9E-324 max: 1.7976931348623157E308 ] isPrimaryKey: false isEstimated: true, cstring2= colName: cstring2 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, ctinyint= colName: ctinyint colType: tinyint countDistincts: 1 numNulls: 0 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -128 max: 127 ] isPrimaryKey: false isEstimated: true, cstring1= colName: cstring1 colType: string countDistincts: 1 numNulls: 0 avgColLen: 100.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: true, _col2= colName: _col2 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: struct countDistincts: 1 numNulls: 0 avgColLen: 440.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false} 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[1] 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.GroupByOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on GBY[2] 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[3] 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.GroupByOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 
}) on GBY[4] 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on FS[6] 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Number of reducers determined to be: 1 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 6 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: GBY, 4 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 3 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: GBY, 2 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 1 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0 2018-07-21T05:20:44,258 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle 
free: true 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id,RAW__DATA__SIZE: bigint|{alltypesorc}raw__data__size) 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[1] 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cfloat] Column[cdouble] Column[cstring1] Column[cstring2] Column[ctimestamp1] Column[ctimestamp2] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function 
compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_1 {} 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[2] 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),VALUE._col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),VALUE._col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),VALUE._col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),VALUE._col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),VALUE._col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),VALUE._col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),VALUE._col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),VALUE._col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),VALUE._col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),VALUE._col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),VALUE._col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_2 {} 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3] 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: struct|{}(tok_function compute_stats (tok_table_or_col ctinyint) 'hll'),_col1: struct|{}(tok_function compute_stats (tok_table_or_col csmallint) 'hll'),_col2: struct|{}(tok_function compute_stats (tok_table_or_col cint) 'hll'),_col3: struct|{}(tok_function compute_stats (tok_table_or_col cbigint) 'hll'),_col4: struct|{}(tok_function compute_stats (tok_table_or_col cfloat) 'hll'),_col5: struct|{}(tok_function compute_stats (tok_table_or_col cdouble) 'hll'),_col6: struct|{}(tok_function compute_stats (tok_table_or_col cstring1) 'hll'),_col7: struct|{}(tok_function compute_stats (tok_table_or_col cstring2) 'hll'),_col8: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp1) 'hll'),_col9: struct|{}(tok_function compute_stats (tok_table_or_col ctimestamp2) 'hll'),_col10: struct|{}(tok_function compute_stats (tok_table_or_col cboolean1) 'hll'),_col11: struct|{}(tok_function compute_stats (tok_table_or_col cboolean2) 'hll')) 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {} 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator GBY[4] 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants 
of op:FS[6] with rs:(_col0: struct|{},_col1: struct|{},_col2: struct|{},_col3: struct|{},_col4: struct|{},_col5: struct|{},_col6: struct|{},_col7: struct|{},_col8: struct|{},_col9: struct|{},_col10: struct|{},_col11: struct|{}) 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op GBY_4 {} 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6] 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0] 2018-07-21T05:20:44,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[3] 2018-07-21T05:20:44,263 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0] 2018-07-21T05:20:44,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc 2018-07-21T05:20:44,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:20:44,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[3] 2018-07-21T05:20:44,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: GBY[4] 2018-07-21T05:20:44,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[6] 2018-07-21T05:20:44,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for GBY[4] 2018-07-21T05:20:44,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[3] with following reduce work: Reducer 2 2018-07-21T05:20:44,274 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[3] as parent from GBY[4] 2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[6] 2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events. 
2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:20:44,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:20:44,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:20:44,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:20:44,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:20:44,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization 2018-07-21T05:20:44,278 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled. 2018-07-21T05:20:44,280 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Examining input format to see if vectorization is enabled. 2018-07-21T05:20:44,293 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Vectorization is enabled for input format(s) [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] 2018-07-21T05:20:44,293 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Validating and vectorizing MapWork... (vectorizedVertexNum 0) 2018-07-21T05:20:44,294 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorization enabled: true 2018-07-21T05:20:44,294 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorized: false 2018-07-21T05:20:44,295 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map notVectorizedReason: TABLESCAN operator: gather stats not supported 2018-07-21T05:20:44,295 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorizedVertexNum: 0 2018-07-21T05:20:44,295 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map enabledConditionsMet: [hive.vectorized.use.vectorized.input.format IS true] 2018-07-21T05:20:44,295 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map inputFileFormatClassNameSet: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat] 2018-07-21T05:20:44,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Using reduce tag 0 2018-07-21T05:20:44,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lazybinary.LazyBinarySerDe: LazyBinarySerDe initialized with: columnNames=[_col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11] columnTypes=[struct, struct, struct, struct, struct, struct, struct, struct, struct, struct, struct, struct] 2018-07-21T05:20:44,321 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Validating and vectorizing ReduceWork... 
(vectorizedVertexNum 1) 2018-07-21T05:20:44,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorization enabled: true 2018-07-21T05:20:44,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorized: false 2018-07-21T05:20:44,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce notVectorizedReason: Aggregation Function expression for GROUPBY operator: UDF compute_stats not supported 2018-07-21T05:20:44,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorizedVertexNum: 1 2018-07-21T05:20:44,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reducer hive.vectorized.execution.reduce.enabled: true 2018-07-21T05:20:44,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reducer engine: tez 2018-07-21T05:20:44,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger 2018-07-21T05:20:44,354 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none 2018-07-21T05:20:44,354 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled. 2018-07-21T05:20:44,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,364 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: Completed plan generation 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false) 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: validation start 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ColumnStatsSemanticAnalyzer: not validating writeEntity, because entity is neither table nor partition 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:_c0, type:struct, comment:null), FieldSchema(name:_c1, type:struct, comment:null), FieldSchema(name:_c2, type:struct, comment:null), FieldSchema(name:_c3, type:struct, comment:null), FieldSchema(name:_c4, type:struct, comment:null), FieldSchema(name:_c5, type:struct, comment:null), FieldSchema(name:_c6, type:struct, comment:null), FieldSchema(name:_c7, type:struct, comment:null), FieldSchema(name:_c8, type:struct, comment:null), FieldSchema(name:_c9, type:struct, comment:null), FieldSchema(name:_c10, type:struct, comment:null), FieldSchema(name:_c11, type:struct, comment:null)], properties:null) 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=1, getTable_(String, String, )=31, flushCache_()=0, getTableColumnStatistics_(String, String, List, )=45} 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af); Time taken: 0.45 seconds 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution 
#1 of query 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,365 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af): ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 2018-07-21T05:20:44,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: ANALYZE_TABLE 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@alltypesorc 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000 2018-07-21T05:20:44,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af 2018-07-21T05:20:44,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1 2018-07-21T05:20:44,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,386 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1 2018-07-21T05:20:44,386 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MAPRED] in serial mode 2018-07-21T05:20:44,398 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found. id: hive_test_user: no such user id: hive_test_user: no such user at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?] 
at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?] at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?] at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?] at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?] at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.initDataset(QTestUtil.java:1130) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.initDataSetForTest(QTestUtil.java:1114) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.cliInit(QTestUtil.java:1171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:170) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] 
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] 
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] 2018-07-21T05:20:44,436 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_915_7777741964758988106-1 2018-07-21T05:20:44,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_915_7777741964758988106-1/hiveptest/_tez_scratch_dir for user: hiveptest 2018-07-21T05:20:44,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized 2018-07-21T05:20:44,466 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest 2018-07-21T05:20:44,466 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null 2018-07-21T05:20:44,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,467 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af 2018-07-21T05:20:44,467 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open 2018-07-21T05:20:44,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: No local resources to process (other than hive-exec) 2018-07-21T05:20:44,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,467 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: ANALYZE TABLE alltype...,cboolean1,cboolean2 (Stage-0) 2018-07-21T05:20:44,488 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\nANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2"} 2018-07-21T05:20:44,488 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af with viewAclString=hive_test_user,hiveptest, 
modifyStr=hive_test_user,hiveptest 2018-07-21T05:20:44,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,625 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo 2018-07-21T05:20:44,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,647 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 3.44KB 2018-07-21T05:20:44,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10002 2018-07-21T05:20:44,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000 2018-07-21T05:20:44,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,684 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_915_7777741964758988106-1 2018-07-21T05:20:44,705 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? 
false 2018-07-21T05:20:44,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo 2018-07-21T05:20:44,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,720 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.62KB 2018-07-21T05:20:44,784 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10001 2018-07-21T05:20:44,786 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10001 2018-07-21T05:20:44,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:20:44,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:44,847 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=ANALYZE TABLE alltype...,cboolean1,cboolean2 (Stage-0), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af } 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:20:45,126 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:20:45,127 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741832_1008, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/summary 2018-07-21T05:20:45,255 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/summary for DFSClient_NONMAPREDUCE_-1638917738_1 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:20:45,275 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:20:45,276 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741833_1009, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_1.recovery 2018-07-21T05:20:45,353 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_1.recovery for DFSClient_NONMAPREDUCE_-1638917738_1 2018-07-21T05:20:45,474 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_1, dagName=ANALYZE TABLE alltype...,cboolean1,cboolean2 (Stage-0) 2018-07-21T05:20:45,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:45,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:45,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:46,000 INFO [SessionTracker] server.ZooKeeperServer: Expiring session 0x164bcc8430d0001, timeout of 30000ms exceeded 2018-07-21T05:20:46,004 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0001 2018-07-21T05:20:46,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:46,647 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001) 2018-07-21T05:20:46,686 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:20:48,670 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE) 2018-07-21T05:20:48,692 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0001 2018-07-21T05:20:48,707 DEBUG [PublicLocalizer #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: PublicLocalizer #0, runnable type: java.util.concurrent.ExecutorCompletionService$QueueingFuture 2018-07-21T05:20:48,784 DEBUG [PublicLocalizer #0] concurrent.ExecutorHelper: afterExecute in thread: PublicLocalizer #0, runnable type: java.util.concurrent.ExecutorCompletionService$QueueingFuture 2018-07-21T05:20:48,990 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs 2018-07-21T05:20:48,990 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl 2018-07-21T05:20:48,990 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:20:48,990 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem 2018-07-21T05:20:48,991 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null 2018-07-21T05:20:48,996 DEBUG 
[ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB 2018-07-21T05:20:49,008 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file 2018-07-21T05:20:49,009 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl 2018-07-21T05:20:49,009 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:20:49,009 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem 2018-07-21T05:20:49,227 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:20:49,293 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:20:49,311 DEBUG [ContainersLauncher #0] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #0, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:20:49,715 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:20:50,316 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:20:50,316 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:20:52,745 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:20:53,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:53,754 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:20:56,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:20:56,285 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741834_1010, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10001/tmpstats-0_TS_0 2018-07-21T05:20:56,331 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/alltypesorc/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/-ext-10001/tmpstats-0_TS_0 is closed by DFSClient_attempt_1532175606211_0001_m_000000_0_-736482025_14 2018-07-21T05:20:56,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:56,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:56,793 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1 2018-07-21T05:20:56,853 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:56,853 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:20:56,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:20:56,854 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741835_1011, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/_task_tmp.-ext-10002/_tmp.000000_0 2018-07-21T05:20:56,877 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000/.hive-staging_hive_2018-07-21_05-20-43_937_5887061281571584581-1/_task_tmp.-ext-10002/_tmp.000000_0 is closed by DFSClient_attempt_1532175606211_0001_r_000000_0_74347768_33 2018-07-21T05:20:56,955 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_1.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1 2018-07-21T05:20:56,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:56,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1 2018-07-21T05:20:56,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:56,999 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] counters.Limits: Counter limits initialized with parameters: GROUP_NAME_MAX=256, MAX_GROUPS=500, COUNTER_NAME_MAX=64, MAX_COUNTERS=1024 2018-07-21T05:20:57,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: TaskId for 000000_0 = 000000 2018-07-21T05:20:57,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,031 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:STATS] in serial mode 2018-07-21T05:20:57,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@1aaf3aa1, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:20:57,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:57,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,032 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:20:57,032 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
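The net.NetworkTopology entries above show the namenode's block placement retrying chooseRandom until it draws a datanode outside the exclude list ("Node ... is excluded, continuing" followed by "chooseRandom returning ..."). A minimal, self-contained Java sketch of that retry loop follows; it is an illustration only, not the actual org.apache.hadoop.net.NetworkTopology code, and the node addresses are copied from the log:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Random;
    import java.util.Set;

    public class ChooseRandomSketch {
        // Pick a random node, skipping excluded ones, as in the log's retry loop.
        static String chooseRandom(List<String> nodes, Set<String> excluded, Random rng) {
            List<String> candidates = new ArrayList<>(nodes);
            while (!candidates.isEmpty()) {
                String pick = candidates.get(rng.nextInt(candidates.size()));
                if (excluded.contains(pick)) {
                    System.out.println("Node " + pick + " is excluded, continuing.");
                    candidates.remove(pick);
                    continue;
                }
                return pick;
            }
            return null; // corresponds to "No node to choose."
        }

        public static void main(String[] args) {
            List<String> rack = Arrays.asList("127.0.0.1:40780", "127.0.0.1:33099",
                    "127.0.0.1:45625", "127.0.0.1:52570");
            Set<String> excluded = new HashSet<>(Arrays.asList("127.0.0.1:33099", "127.0.0.1:40780"));
            System.out.println("chooseRandom returning " + chooseRandom(rack, excluded, new Random()));
        }
    }

The real implementation samples from subtree node counts rather than a candidate list, which is why the same excluded node can appear more than once in the log before a pick succeeds.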
2018-07-21T05:20:57,032 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:20:57,032 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:20:57,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:57,032 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Executing stats (no job) task 2018-07-21T05:20:57,032 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.StatsTask: Initialized threadpool for stats computation with 10 threads 2018-07-21T05:20:57,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Stats collection waiting for threadpool to shutdown.. 2018-07-21T05:20:57,034 DEBUG [StatsNoJobTask-Thread-0] FileOperations: Aggregating stats for hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:20:57,036 DEBUG [StatsNoJobTask-Thread-0] FileOperations: Computing stats for HdfsNamedFileStatus{path=hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/alltypesorc; isDirectory=false; length=295616; replication=3; blocksize=134217728; modification_time=1532175642978; access_time=1532175642938; owner=hiveptest; group=supergroup; permission=rw-r--r--; isSymlink=false; hasAcl=false; isEncrypted=false; isErasureCoded=false} 2018-07-21T05:20:57,050 DEBUG [StatsNoJobTask-Thread-0] impl.OrcCodecPool: Got recycled codec 2018-07-21T05:20:57,050 DEBUG [StatsNoJobTask-Thread-0] impl.OrcCodecPool: Got recycled codec 2018-07-21T05:20:57,051 DEBUG [StatsNoJobTask-Thread-0] orc.OrcInputFormat: No ORC pushdown predicate - no column names 2018-07-21T05:20:57,051 INFO [StatsNoJobTask-Thread-0] orc.ReaderImpl: Reading ORC rows from hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/alltypesorc with {include: null, offset: 0, length: 0} 2018-07-21T05:20:57,051 INFO [StatsNoJobTask-Thread-0] impl.RecordReaderImpl: Reader schema not provided -- using file schema struct 2018-07-21T05:20:57,051 DEBUG [StatsNoJobTask-Thread-0] impl.OrcCodecPool: Got recycled codec 2018-07-21T05:20:57,053 DEBUG [StatsNoJobTask-Thread-0] impl.ReaderImpl: Unknown primitive category: STRUCT 2018-07-21T05:20:57,053 DEBUG [StatsNoJobTask-Thread-0] stats.BasicStatsNoJobTask: Table default.alltypesorc stats: [numFiles=1, numRows=12288, totalSize=295616, rawDataSize=2907994, numFilesErasureCoded=0] 2018-07-21T05:20:57,054 INFO [StatsNoJobTask-Thread-0] stats.BasicStatsNoJobTask: Table default.alltypesorc stats: [numFiles=1, numRows=12288, totalSize=295616, rawDataSize=2907994, numFilesErasureCoded=0] 2018-07-21T05:20:57,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Stats collection threadpool shutdown successful. 
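stats.BasicStatsNoJobTask above fills in numRows, totalSize and rawDataSize without launching a job because ORC keeps those statistics in the file footer. A minimal sketch of reading them with the public ORC reader API (assumes orc-core and hadoop-common on the classpath; args[0] is any ORC file, e.g. the alltypesorc file from the log):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.orc.OrcFile;
    import org.apache.orc.Reader;

    public class OrcFooterStats {
        public static void main(String[] args) throws Exception {
            // e.g. hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc/alltypesorc
            Reader reader = OrcFile.createReader(new Path(args[0]),
                    OrcFile.readerOptions(new Configuration()));
            // Both values come straight from the file footer; no rows are scanned,
            // which is what makes the "no job" stats task cheap.
            System.out.println("numRows=" + reader.getNumberOfRows()
                    + ", rawDataSize=" + reader.getRawDataSize());
        }
    }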
2018-07-21T05:20:57,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Collectors.size(): [default@alltypesorc#PTable] 2018-07-21T05:20:57,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Updating stats for: default.alltypesorc 2018-07-21T05:20:57,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,056 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:20:57,056 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:20:57,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:20:57,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:20:57,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.alltypesorc newtbl=alltypesorc 2018-07-21T05:20:57,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.alltypesorc newtbl=alltypesorc 2018-07-21T05:20:57,078 
DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Updated stats for default.alltypesorc. 2018-07-21T05:20:57,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsNoJobTask: Updated stats for: default.alltypesorc 2018-07-21T05:20:57,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: FetchOperator get writeIdStr: null 2018-07-21T05:20:57,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] mapred.FileInputFormat: Time taken to get FileStatuses: 10 2018-07-21T05:20:57,126 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] mapred.FileInputFormat: Total input files to process : 1 2018-07-21T05:20:57,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] mapred.FileInputFormat: Total # of splits generated by getSplits: 1, TimeTaken: 17 2018-07-21T05:20:57,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<_col0:struct,_col1:struct,_col2:struct,_col3:struct,_col4:struct,_col5:struct,_col6:struct,_col7:struct,_col8:struct,_col9:struct,_col10:struct,_col11:struct> 2018-07-21T05:20:57,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties: table properties: {columns=_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11, serialization.escape.crlf=true, serialization.lib=org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, hive.serialization.extend.additional.nesting.levels=true, serialization.format=1, columns.types=struct:struct:struct:struct:struct:struct:struct:struct:struct:struct:struct:struct, escape.delim=\} partition properties: {columns=_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11, serialization.escape.crlf=true, serialization.lib=org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, hive.serialization.extend.additional.nesting.levels=true, serialization.format=1, columns.types=struct:struct:struct:struct:struct:struct:struct:struct:struct:struct:struct:struct, escape.delim=\} 2018-07-21T05:20:57,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:57,204 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: write_column_statistics: table=hive.default.alltypesorc 2018-07-21T05:20:57,204 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=write_column_statistics: table=hive.default.alltypesorc 2018-07-21T05:20:57,228 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=ctinyint 2018-07-21T05:20:57,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be 
main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,242 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=csmallint 2018-07-21T05:20:57,243 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cint 2018-07-21T05:20:57,244 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cbigint 2018-07-21T05:20:57,245 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cfloat 2018-07-21T05:20:57,247 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cdouble 2018-07-21T05:20:57,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,254 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cstring1 2018-07-21T05:20:57,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cstring2 2018-07-21T05:20:57,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=ctimestamp1 2018-07-21T05:20:57,259 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=ctimestamp2 2018-07-21T05:20:57,260 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cboolean1 2018-07-21T05:20:57,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,263 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: Updating table level column statistics for table=hive.default.alltypesorc colName=cboolean2 2018-07-21T05:20:57,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:57,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 
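The ObjectStore entries above write one table-level statistics object per column. For reference, a hedged sketch of reading those statistics back through the metastore client; method signatures vary between Hive releases (the three-argument getTableColumnStatistics shown here matches Hive 3.x-era IMetaStoreClient, later releases add an engine argument), and a metastore reachable via hive-site.xml is assumed:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;

    public class ReadColumnStats {
        public static void main(String[] args) throws Exception {
            // Connects to whatever metastore hive-site.xml on the classpath points at.
            HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
            List<ColumnStatisticsObj> stats = client.getTableColumnStatistics(
                    "default", "alltypesorc", Arrays.asList("ctinyint", "cboolean2"));
            for (ColumnStatisticsObj s : stats) {
                System.out.println(s.getColName() + " -> " + s.getStatsData());
            }
            client.close();
        }
    }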
2018-07-21T05:20:57,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 2018-07-21T05:20:57,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: ANALYZE_TABLE 2018-07-21T05:20:57,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc 2018-07-21T05:20:57,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@alltypesorc 2018-07-21T05:20:57,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_937_5887061281571584581-1/-mr-10000 2018-07-21T05:20:57,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,272 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0 2018-07-21T05:20:57,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,273 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:20:57,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=35, setPartitionColumnStatistics_(SetPartitionsStatsRequest, )=67} 2018-07-21T05:20:57,273 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052043_c0cde5b4-d90a-4336-ad68-cd1887bb30af); Time taken: 12.908 seconds 2018-07-21T05:20:57,273 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:20:57,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:57,273 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:20:57,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2 2018-07-21T05:20:57,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: 
hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-43_915_7777741964758988106-1 2018-07-21T05:20:57,275 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 13.358 seconds 2018-07-21T05:20:57,275 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:57,275 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:20:57,275 INFO [main] QTestUtil: Result from cliDriver.processLine in initFromDatasets=0 2018-07-21T05:20:57,283 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0007 2018-07-21T05:20:57,285 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d0007 closed 2018-07-21T05:20:57,287 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@3ccc5fa2 2018-07-21T05:20:57,290 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:53759 2018-07-21T05:20:57,290 INFO [main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook to org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl 2018-07-21T05:20:57,290 DEBUG [main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@200eaca, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:20:57,290 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:57,290 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:57,290 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:20:57,290 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:20:57,291 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:20:57,291 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:20:57,291 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:20:57,291 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
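The zookeeper.ZooKeeper lines above are an ordinary client session being closed and a fresh one initiated for the next test. A minimal sketch of the client call that produces the "Initiating client connection" entry, reusing the connect string and timeout from the log (illustrative only):

    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    public class ZkSessionSketch {
        public static void main(String[] args) throws Exception {
            // Connect string and session timeout taken from the log above;
            // any reachable ZooKeeper ensemble works.
            ZooKeeper zk = new ZooKeeper("localhost:63672", 1200000, new Watcher() {
                @Override
                public void process(WatchedEvent event) {
                    System.out.println("ZK event: " + event);
                }
            });
            // ... use the session ...
            zk.close(); // produces the server-side "Processed session termination" entry
        }
    }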
2018-07-21T05:20:57,291 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:20:57,292 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:53759 2018-07-21T05:20:57,292 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:20:57,293 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0008 with negotiated timeout 40000 for client /127.0.0.1:53759 2018-07-21T05:20:57,293 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:57,296 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:20:57,296 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:20:57,297 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:20:57,297 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:57,298 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:20:57,298 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:20:57,301 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx 2018-07-21T05:20:57,311 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:20:57,315 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:57,324 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:20:57,327 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db 2018-07-21T05:20:57,327 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, 
isDefault=false 2018-07-21T05:20:57,327 DEBUG [main] CliDriver: CliDriver inited with classpath /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apac
he/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:
/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/hive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/su
percsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.22
6.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/h
iveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hive
ptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io
/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/
hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbas
e-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/mav
en/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/a
sm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/asm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/h
iveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-provider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2
.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/
35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-acid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf:
2018-07-21T05:20:57,330 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,330 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:57,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: false
2018-07-21T05:20:57,332 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,332 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:57,332 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,332 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:57,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true
2018-07-21T05:20:57,333 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,333 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:57,333 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,333 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:57,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true
2018-07-21T05:20:57,333 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,333 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:20:57,333 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:20:57,333 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:20:57,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:57,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
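
The repeated HiveConf / SessionState lines above are the CLI session renaming its thread per query log id, and the VariableSubstitution entries show Hive's query-time variable expansion being consulted (off once, then on). As a minimal sketch of the feature those entries refer to, assuming the stock hive.variable.substitute switch and an illustrative hivevar that does not appear in this run:

    -- Variable substitution, the mechanism behind "Substitution is on: true" above.
    SET hive.variable.substitute=true;
    SET hivevar:src=alltypesorc;
    -- ${hivevar:src} expands to alltypesorc before the statement is parsed.
    SELECT COUNT(*) FROM ${hivevar:src};
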
2018-07-21T05:20:57,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:57,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:57,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:20:57,335 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0): CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:20:57,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:57,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:20:57,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:20:57,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:57,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:57,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,345 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:57,345 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null. Setting it to value: ignored
2018-07-21T05:20:57,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:57,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,351 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:57,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,351 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:20:57,351 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:20:57,351 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=ee745c13-27f8-4940-a347-c8307a2da8be, clientType=HIVECLI]
2018-07-21T05:20:57,352 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook
2018-07-21T05:20:57,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@6758d0c5, needsRefresh = false, db.isCurrentUserOwner = true
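
For readability, the CTAS statement this driver is compiling (identical in content to the single-line query logged above):

    CREATE EXTERNAL TABLE druid_partitioned_table_0
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES (
      "druid.segment.granularity" = "HOUR",
      "druid.query.granularity" = "MINUTE",
      "druid.segment.targetShardsPerGranularity" = "0"
    )
    AS SELECT
      cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
      cstring1, cstring2, cdouble, cfloat,
      ctinyint, csmallint, cint, cbigint,
      cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;

Reading the table properties by the usual Druid indexing semantics (the log itself does not spell this out): segment granularity HOUR asks for one Druid segment per hour of __time, query granularity MINUTE truncates ingested timestamps to the minute, and targetShardsPerGranularity 0 leaves per-granularity sharding unset.
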
2018-07-21T05:20:57,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:57,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:20:57,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:20:57,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:20:57,353 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:20:57,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:20:57,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,355 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:20:57,355 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null. Setting it to value: ignored
2018-07-21T05:20:57,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:57,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,359 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:57,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl
2018-07-21T05:20:57,360 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_partitioned_table_0 position=22
2018-07-21T05:20:57,442 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Creating Druid HTTP client with 20 max parallel connections and 60000ms read timeout
2018-07-21T05:20:57,586 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] lifecycle.Lifecycle$AnnotationBasedHandler: Invoking start method[public void org.apache.hive.druid.com.metamx.http.client.NettyHttpClient.start()] on object[org.apache.hive.druid.com.metamx.http.client.NettyHttpClient@2bab73fd].
2018-07-21T05:20:57,590 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null. Setting it to value: ignored
2018-07-21T05:20:57,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:20:57,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
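
The DruidStorageHandler line above records the two knobs that sized its HTTP client. These appear to correspond to the hive.druid.http.numConnection and hive.druid.http.read.timeout properties in HiveConf; the key names are an assumption here, since the log prints only the resolved values:

    -- Assumed property names; values mirror the defaults logged above
    -- (20 connections, 60000 ms read timeout as an ISO-8601 duration).
    SET hive.druid.http.numConnection=20;
    SET hive.druid.http.read.timeout=PT1M;
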
2018-07-21T05:20:57,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,601 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:20:57,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,602 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:20:57,602 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:20:57,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,603 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:20:57,603 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:20:57,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:20:57,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:20:57,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:20:57,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:57,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:20:57,623 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,625 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:20:57,625 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:20:57,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,626 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:20:57,626 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:20:57,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:20:57,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:57,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:57,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse
2018-07-21T05:20:57,628 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1
2018-07-21T05:20:57,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:20:58,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:58,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,536 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,536 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:58,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
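
The get_not_null_constraints and get_primary_keys calls here (and the unique/foreign key lookups that follow) are CalcitePlanner probing the source table for declared constraints the optimizer could rely on. For context, constraints of these kinds are declared with DDL along the following lines; the table and column names are hypothetical, and nothing in this run suggests alltypesorc declares any:

    -- Hypothetical RELY constraints of the kinds the planner is probing for;
    -- DISABLE NOVALIDATE RELY makes them optimizer hints rather than enforced checks.
    CREATE TABLE t (
      id INT NOT NULL DISABLE NOVALIDATE RELY,
      name STRING,
      PRIMARY KEY (id) DISABLE NOVALIDATE RELY
    );
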
2018-07-21T05:20:58,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,583 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,583 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,584 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:58,584 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,584 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,584 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,584 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:20:58,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:58,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:58,604 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:20:58,604 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:20:58,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:58,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:20:59,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:20:59,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:20:59,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:20:59,740 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:20:59,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:20:59,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,776 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,802 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,844 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,844 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,844 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,844 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:20:59,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:20:59,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,847 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,847 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:20:59,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:20:59,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:20:59,919 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:20:59,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:20:59,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier 
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:20:59,943 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_partitioned_table_0 position=22 2018-07-21T05:20:59,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:59,943 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:20:59,943 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:20:59,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:59,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:59,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:59,946 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:20:59,946 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:20:59,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:20:59,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:59,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:59,949 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:20:59,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:59,949 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:20:59,949 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:20:59,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:20:59,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:20:59,964 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
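The Calcite plans above, the CREATE TABLE target druid_partitioned_table_0, and the DruidSerDe records that follow are all consistent with a CTAS from alltypesorc into a Druid-backed table. A hedged reconstruction of the statement under compilation (not quoted from the log; the storage handler class is the standard Hive-Druid one, and the segment granularity is inferred from the hourly floor_hour reduce key that appears later):

    -- Sketch of the statement being compiled (reconstructed, not quoted from the log).
    -- The TBLPROPERTIES value is an assumption inferred from the hourly reduce key.
    CREATE TABLE druid_partitioned_table_0
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
    AS
    SELECT
      CAST(ctimestamp1 AS timestamp with local time zone) AS `__time`,
      cstring1, cstring2, cdouble, cfloat,
      ctinyint, csmallint, cint, cbigint,
      cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;

The select list matches the HiveProject (a cast of ctimestamp1 to TIMESTAMP_WITH_LOCAL_TIME_ZONE aliased __time, followed by the ten remaining columns), and the HiveFilter matches the WHERE clause.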
2018-07-21T05:20:59,964 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:20:59,965 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:20:59,965 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:20:59,965 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:20:59,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:20:59,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:20:59,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:20:59,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:20:59,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:20:59,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2))
2018-07-21T05:20:59,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:20:59,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:20:59,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:20:59,996 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:20:59,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:20:59,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003
2018-07-21T05:21:00,017 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:21:00,017 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:21:00,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:21:00,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
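Both DruidSerDe initializations list __time first, typed timestamp with local time zone('US/Pacific'): Druid addresses every row by a __time timestamp, so the first output column of a Hive-managed Druid table has to carry it, and the cast in the select list is what satisfies that. A minimal sketch of producing such a column (hive.local.time.zone is a real Hive 3 property; the value shown is an assumption matching the 'US/Pacific' in the records above):

    -- Minimal sketch: build the __time column the SerDe expects.
    SET hive.local.time.zone=US/Pacific;  -- assumed session setting
    SELECT CAST(ctimestamp1 AS timestamp with local time zone) AS `__time`
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL
    LIMIT 5;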
2018-07-21T05:21:00,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:21:00,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:21:00,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:21:00,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:21:00,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:21:00,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=2
2018-07-21T05:21:00,029 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:21:00,030 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:21:00,030 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:21:00,030 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:21:00,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:21:00,037 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:21:00,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:21:00,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:21:00,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
2018-07-21T05:21:00,042 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:21:00,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity]]
2018-07-21T05:21:00,048 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:21:00,048 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:21:00,049 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:21:00,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:21:00,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:21:00,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:21:00,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:21:00,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
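The inserted SEL_5/RS_6/SEL_7 chain clusters rows by an hourly __time_granularity key before they reach the Druid file sink; the constant-propagation trace below spells the key out as floor_hour applied to the __time value. A quick sketch of what that key evaluates to (floor_hour is the Hive UDF named in the log; the timestamp literal is made up):

    -- floor_hour truncates a timestamp to the start of its hour,
    -- which is what RS_6 partitions and sorts on.
    SELECT floor_hour(CAST('2018-07-21 05:20:57' AS timestamp));
    -- expected result: 2018-07-21 05:00:00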
2018-07-21T05:21:00,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:21:00,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:21:00,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:21:00,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time 
zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null}) 2018-07-21T05:21:00,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null}) 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7] 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity]) 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null}) 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504 2018-07-21T05:21:00,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:21:00,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:21:00,061 
INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:21:00,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 6.82541ms + 0.029899ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)] 2018-07-21T05:21:00,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:21:00,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:21:00,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0] 2018-07-21T05:21:00,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 
Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:21:00,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:21:00,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4] 2018-07-21T05:21:00,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2] 2018-07-21T05:21:00,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 
0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:21:00,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5] 2018-07-21T05:21:00,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] 
isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:21:00,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6] 2018-07-21T05:21:00,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:21:00,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7] 2018-07-21T05:21:00,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] 
isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:21:00,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 
colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:21:00,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0]
2018-07-21T05:21:00,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4]
2018-07-21T05:21:00,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2]
2018-07-21T05:21:00,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5]
2018-07-21T05:21:00,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6]
2018-07-21T05:21:00,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7]
2018-07-21T05:21:00,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3]
2018-07-21T05:21:00,111 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:21:00,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:21:00,117 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:21:00,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity])
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:21:00,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:00,118 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:21:00,118 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:21:00,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:21:00,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:00,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:00,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:00,121 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:21:00,121 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:21:00,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:21:00,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:00,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:00,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:21:00,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:21:00,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:21:00,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:21:00,126 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_partitioned_table_0
2018-07-21T05:21:00,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:21:00,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:21:00,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:21:00,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:21:00,127 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:21:00,127 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:21:00,127 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
2018-07-21T05:21:00,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:21:00,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:21:00,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,130 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:21:00,130 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:21:00,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:21:00,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:21:00,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,130 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:21:00,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:21:00,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=22, getUniqueConstraints_(UniqueConstraintsRequest, )=19, getPrimaryKeys_(PrimaryKeysRequest, )=19, getTableColumnStatistics_(String, String, List, )=40, getForeignKeys_(ForeignKeysRequest, )=39}
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0); Time taken: 2.795 seconds
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:21:00,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0): CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:21:00,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default
2018-07-21T05:21:00,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_partitioned_table_0
2018-07-21T05:21:00,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0
2018-07-21T05:21:00,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1
2018-07-21T05:21:00,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:00,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1
2018-07-21T05:21:00,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:MAPRED] in serial mode
2018-07-21T05:21:01,012 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-57_335_1786513459201617797-1
2018-07-21T05:21:01,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-57_335_1786513459201617797-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:21:01,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:21:01,028 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:21:01,028 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:21:01,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,028 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0
2018-07-21T05:21:01,028 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:21:01,030 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Localizing resource because it does not exist: /home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar to dest: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar
2018-07-21T05:21:01,047 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:01,047 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:01,047 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:21:01,047 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:21:01,047 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:01,047 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:01,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:21:01,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:01,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:01,048 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741836_1012, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar
2018-07-21T05:21:01,342 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:21:01,343 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175661341 for hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar
2018-07-21T05:21:01,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:21:01,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,344 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: CREATE EXTERNAL TABLE druid_partition...NULL (Stage-1)
2018-07-21T05:21:01,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\nCREATE EXTERNAL TABLE druid_partitioned_table_0\n STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'\n TBLPROPERTIES (\n \"druid.segment.granularity\" = \"HOUR\",\n \"druid.query.granularity\" = \"MINUTE\",\n \"druid.segment.targetShardsPerGranularity\" = \"0\"\n )\n AS\n SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL"}
2018-07-21T05:21:01,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:21:01,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,354 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:21:01,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,375 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 3.00KB
2018-07-21T05:21:01,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003
2018-07-21T05:21:01,386 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003
2018-07-21T05:21:01,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10001
2018-07-21T05:21:01,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10002
2018-07-21T05:21:01,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,394 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-57_335_1786513459201617797-1
2018-07-21T05:21:01,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:21:01,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:21:01,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,406 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.19KB
2018-07-21T05:21:01,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:21:01,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=CREATE EXTERNAL TABLE druid_partition...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0 }
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:21:01,495 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:21:01,496 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741837_1013, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_2.recovery
2018-07-21T05:21:01,520 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_2.recovery for DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:21:01,832 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_2, dagName=CREATE EXTERNAL TABLE druid_partition...NULL (Stage-1)
2018-07-21T05:21:01,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:01,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:02,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:02,439 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001)
2018-07-21T05:21:02,441 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:21:02,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:02,945 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:21:04,646 WARN [NM Event dispatcher] containermanager.ContainerManagerImpl: couldn't find container container_1532175606211_0001_01_000003 while processing FINISH_CONTAINERS event
2018-07-21T05:21:05,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:05,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:05,476 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:21:07,341 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:21:07,341 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741838_1014, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/16e164989ca14b2ab99a151a888f3026/0_descriptor.json
2018-07-21T05:21:07,387 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/16e164989ca14b2ab99a151a888f3026/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:07,412 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:21:07,412 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741839_1015, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_20_59.997-07_00/0_index.zip
2018-07-21T05:21:07,435 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/16e164989ca14b2ab99a151a888f3026/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:07,466 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:07,466 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:07,466 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:21:07,466 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:07,466 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:07,467 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:07,467 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:07,467 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:21:07,467 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:21:07,468 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741840_1016, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/segmentsDescriptorDir/default.druid_partitioned_table_0_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052059.997-0700.json
2018-07-21T05:21:07,503 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/segmentsDescriptorDir/default.druid_partitioned_table_0_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052059.997-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:21:08,236 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:08,237 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741841_1017, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/ed8812d9913e4a25baa626d967ebaf2b/0_index.zip
2018-07-21T05:21:08,271 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,271 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,271 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:21:08,271 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:21:08,271 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,271 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,272 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:21:08,272 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:21:08,272 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:08,272 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741842_1018, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/ed8812d9913e4a25baa626d967ebaf2b/0_descriptor.json
2018-07-21T05:21:08,295 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/ed8812d9913e4a25baa626d967ebaf2b/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:08,314 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/intermediateSegmentDir/default.druid_partitioned_table_0/ed8812d9913e4a25baa626d967ebaf2b/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:21:08,323 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,324 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,324 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:21:08,324 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:21:08,324 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741843_1019, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/segmentsDescriptorDir/default.druid_partitioned_table_0_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052059.997-0700.json
2018-07-21T05:21:08,342 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0/segmentsDescriptorDir/default.druid_partitioned_table_0_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052059.997-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:21:08,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:21:08,368 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741844_1020, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003/tmpstats-0_FS_3
2018-07-21T05:21:08,379 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003/tmpstats-0_FS_3 is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_43103607_51
2018-07-21T05:21:08,409 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_2.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:21:08,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:08,416 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1
2018-07-21T05:21:08,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@2288342c, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: MoveTask moving hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10002 to hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table_0
2018-07-21T05:21:08,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:08,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Moving data to directory hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table_0 from hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10002
2018-07-21T05:21:08,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:21:08,424 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode
2018-07-21T05:21:08,424 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_partitioned_table_0
2018-07-21T05:21:08,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.druid_partitioned_table_0 on null
2018-07-21T05:21:08,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:21:08,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:21:08,427 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:08,428 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:21:08,428 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:21:08,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:21:08,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:08,438 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:21:08,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:21:08,531 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
2018-07-21T05:21:08,735 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.SQLMetadataConnector: Table[druid_segments] already exists
2018-07-21T05:21:08,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: pre-create data source with name default.druid_partitioned_table_0
2018-07-21T05:21:08,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:08,747 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: create_table: Table(tableName:druid_partitioned_table_0, dbName:default, owner:hive_test_user, createTime:1532175668, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.druid.serde.DruidSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{druid.segment.granularity=HOUR, external.table.purge=true, EXTERNAL=TRUE, bucketing_version=2, druid.query.granularity=MINUTE, druid.segment.targetShardsPerGranularity=0, druid.datasource=default.druid_partitioned_table_0, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER)
2018-07-21T05:21:08,747 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=create_table: Table(tableName:druid_partitioned_table_0, dbName:default, owner:hive_test_user, createTime:1532175668, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.druid.serde.DruidSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{druid.segment.granularity=HOUR, external.table.purge=true, EXTERNAL=TRUE, bucketing_version=2, druid.query.granularity=MINUTE, druid.segment.targetShardsPerGranularity=0, druid.datasource=default.druid_partitioned_table_0, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER)
2018-07-21T05:21:08,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:21:08,754 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.MetaStoreUtils: Updating table stats for druid_partitioned_table_0
2018-07-21T05:21:08,754 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.MetaStoreUtils: Updated size of table druid_partitioned_table_0 to 0
2018-07-21T05:21:08,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:21:08,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:21:08,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: commit insert into table druid_partitioned_table_0 overwrite false
2018-07-21T05:21:09,000 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Moving [2] Druid segments from staging directory [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0] to Deep storage [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage]
2018-07-21T05:21:09,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:21:09,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:21:09,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:21:09,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:21:09,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:21:09,007 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hdfs.HdfsDataSegmentPusher: Configured HDFS as deep storage
2018-07-21T05:21:09,020 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Building timeline for umbrella Interval [1969-12-31T23:00:00.000Z/1970-01-01T01:00:00.000Z]
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:21:09,129 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:21:09,130 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741845_1021, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table_0/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_20_59.997-07_00/descriptor.json
2018-07-21T05:21:09,144 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table_0/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_20_59.997-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:21:09,164 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:21:09,164 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741846_1022, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table_0/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_20_59.997-07_00/descriptor.json 2018-07-21T05:21:09,179 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table_0/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_20_59.997-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:21:09,185 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00 2018-07-21T05:21:09,186 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00 2018-07-21T05:21:09,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: checking load status from coordinator localhost:8081 2018-07-21T05:21:09,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/status] starting 2018-07-21T05:21:09,231 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8081 [previous entry repeated 19 more times between 05:21:09,281 and 05:21:09,284; duplicates omitted] 2018-07-21T05:21:09,447 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 2018 12:21:09 GMT Content-Type: application/json Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:21:09,447 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/status] Got response: 200 OK 2018-07-21T05:21:09,472 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@4b3746df 2018-07-21T05:21:09,473 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 408B, last=false 2018-07-21T05:21:09,476 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf 2018-07-21T05:21:09,476 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 0B, last=true 2018-07-21T05:21:09,483 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Waiting for the loading of [2] segments 2018-07-21T05:21:09,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:21:09,502 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:21:09 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:21:09,502 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:21:09,504 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:21:09,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:21:09,517 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:21:09 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:21:09,517 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:21:09,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:21:14,660 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE) 2018-07-21T05:21:14,691 WARN [ContainersLauncher #0] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0001_01_000002 is : 143 2018-07-21T05:21:14,695 DEBUG [ContainersLauncher #0] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #0, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:21:16,833 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,833 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,834 DEBUG [DeletionService #0] concurrent.ExecutorHelper: 
afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,836 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,841 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,841 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,851 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,851 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,851 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,852 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,852 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:16,869 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:21:19,231 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:21:19,293 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:21:39,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:21:39,527 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:21:39 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:21:39,527 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:21:39,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:21:39,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:21:39,534 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:21:39 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:21:39,534 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:21:39,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:21:49,231 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:21:49,294 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:22:09,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:22:09,540 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:22:09 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:22:09,540 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:22:09,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:22:09,542 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:22:09,548 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:22:09 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:22:09,548 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:22:09,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:22:19,231 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:22:19,294 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:22:39,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:22:39,554 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:22:39 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:22:39,554 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:22:39,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:22:39,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:22:39,560 
DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:22:39 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:22:39,560 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:22:39,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:22:49,232 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:22:49,295 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:23:09,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:23:09,566 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:09 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:09,566 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:23:09,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:23:09,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] starting 2018-07-21T05:23:09,571 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] messageReceived: DefaultHttpResponse(chunked: 
false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:09 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:09,571 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] Got response: 204 No Content 2018-07-21T05:23:09,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table_0/segments/default.druid_partitioned_table_0_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:20:59.997-07:00] response is [] 2018-07-21T05:23:19,232 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:23:19,295 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:23:39,574 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Wait time exhausted and we have [2] out of [2] segments not loaded yet 2018-07-21T05:23:39,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:23:39,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:23:39,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:23:39,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@druid_partitioned_table_0) Type=TABLE WriteType=DDL_NO_LOCK because WriteEntity(default@druid_partitioned_table_0) Type=TABLE WriteType=DDL_NO_LOCK is present 2018-07-21T05:23:39,602 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-3:STATS] in serial mode 2018-07-21T05:23:39,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@505b5ddc, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:23:39,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:23:39,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,603 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 
2018-07-21T05:23:39,603 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:23:39,603 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:23:39,603 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:23:39,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:23:39,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:23:39,605 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:23:39,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:23:39,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,612 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:23:39,613 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:23:39,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:23:39,623 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,625 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Executing stats task 2018-07-21T05:23:39,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003 2018-07-21T05:23:39,630 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003 2018-07-21T05:23:39,674 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Part ID: default.druid_partitioned_table_0/, numRows 2018-07-21T05:23:39,674 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Read stats for default.druid_partitioned_table_0/, numRows, 9173: 2018-07-21T05:23:39,674 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Part ID: default.druid_partitioned_table_0/, rawDataSize 2018-07-21T05:23:39,674 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Read stats for default.druid_partitioned_table_0/, rawDataSize, 0: 2018-07-21T05:23:39,674 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:23:39,675 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:23:39,675 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,675 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.druid_partitioned_table_0 newtbl=druid_partitioned_table_0 2018-07-21T05:23:39,675 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.druid_partitioned_table_0 newtbl=druid_partitioned_table_0 2018-07-21T05:23:39,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,704 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Table default.druid_partitioned_table_0 stats: [numFiles=0, numRows=9173, totalSize=0, rawDataSize=0, numFilesErasureCoded=0] 2018-07-21T05:23:39,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsAggregator: About to delete stats tmp dir :hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1/-ext-10003 2018-07-21T05:23:39,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: CREATETABLE_AS_SELECT 2018-07-21T05:23:39,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc 2018-07-21T05:23:39,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: database:default 2018-07-21T05:23:39,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@druid_partitioned_table_0 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.__time EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: 
druid_partitioned_table_0.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] 2018-07-21T05:23:39,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table_0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] 2018-07-21T05:23:39,707 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,707 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=29, getTable_(String, String, )=13} 2018-07-21T05:23:39,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052057_bf6417d3-4a9b-4964-9d4b-286dd2fa3ba0); Time taken: 159.592 seconds 2018-07-21T05:23:39,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:23:39,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "0" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, 
cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,724 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-20-57_335_1786513459201617797-1 2018-07-21T05:23:39,726 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-20-57_335_1786513459201617797-1 2018-07-21T05:23:39,727 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 162.388 seconds 2018-07-21T05:23:39,727 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:23:39,727 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:23:39,727 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:23:39,727 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:23:39,729 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 2018-07-21T05:23:39,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,730 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052339_453d6c73-5919-44f8-98cc-dffb6df99017): EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, 
cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:23:39,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,735 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook 2018-07-21T05:23:39,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@431f2a86, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:23:39,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:23:39,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,735 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:23:39,735 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:23:39,736 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:23:39,736 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:23:39,736 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,736 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:23:39,740 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:23:39,741 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:23:39,741 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_partitioned_table position=30 2018-07-21T05:23:39,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,743 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:23:39,744 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null .
Setting it to value: ignored 2018-07-21T05:23:39,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:23:39,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,749 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:23:39,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,749 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:23:39,749 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:23:39,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,751 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:39,751 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:39,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:39,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,752 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:23:39,752 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:23:39,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,752 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,752 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,763 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:23:39,763 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:23:39,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,763 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:39,763 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:39,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:39,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_730_8859096223375137313-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse
2018-07-21T05:23:39,765 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:23:39,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,768 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,768 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,770 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,770 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,771 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,771 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,772 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,772 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,773 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:23:39,773 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:23:39,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,776 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:23:39,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
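Note: the four metastore calls above (get_not_null_constraints, get_primary_keys issued twice, get_unique_constraints, get_foreign_keys) are the planner collecting every declared constraint kind on the source table before CBO, since RELY constraints can enable rewrites. Nothing in this run suggests alltypesorc has any declared, but for reference this is the DDL shape those lookups correspond to; the constraint names below are hypothetical:

    ALTER TABLE alltypesorc ADD CONSTRAINT pk_alltypesorc
      PRIMARY KEY (cint) DISABLE NOVALIDATE;
    ALTER TABLE alltypesorc ADD CONSTRAINT uq_alltypesorc_cstring1
      UNIQUE (cstring1) DISABLE NOVALIDATE;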
2018-07-21T05:23:39,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,805 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,805 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,805 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,807 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,807 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:39,827 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_partitioned_table position=30
2018-07-21T05:23:39,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,827 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:23:39,827 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:23:39,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,829 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,829 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,829 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:39,829 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:39,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:39,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,830 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:23:39,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,830 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,830 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:23:39,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,840 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:23:39,840 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:23:39,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,840 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:39,840 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:39,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:39,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:23:39,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:23:39,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:23:39,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2))
2018-07-21T05:23:39,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:23:39,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:23:39,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:23:39,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:23:39,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:23:39,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_730_8859096223375137313-1/-ext-10003
2018-07-21T05:23:39,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,846 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:39,846 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:39,848 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
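Note: the tok_select tree, the DruidSerDe column/type lists above, and the earlier "Creating table default.druid_partitioned_table" record pin down the statement being compiled. Reconstructed as a sketch below; the TBLPROPERTIES values are assumptions, inferred from the floor_hour and mod-6 shard-key expressions that appear later in this trace, not read from the log:

    CREATE TABLE druid_partitioned_table
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES (
      "druid.segment.granularity" = "HOUR",
      "druid.segment.targetShardsPerGranularity" = "6"
    )
    AS
    SELECT cast(ctimestamp1 AS timestamp with local time zone) AS `__time`,
           cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint,
           cint, cbigint, cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;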
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_730_8859096223375137313-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:23:39,849 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,851 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:23:39,851 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:23:39,851 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:23:39,851 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
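Note: SimplePredicatePushDown walks the chain top-down (FS(3), SEL(2), FIL(1), TS(0)) and rewrites TS[0]-FIL[1]-SEL[2]-FS[3] into TS[0]-FIL[4]-SEL[2]-FS[3], with the `ctimestamp1 is not null` predicate now evaluated at the scan. The same effect can be observed on any build with EXPLAIN; the exact output format varies by version, but the filter shows up inside the TableScan stage rather than above the projection. Sketch:

    EXPLAIN
    SELECT cast(ctimestamp1 AS timestamp with local time zone) AS `__time`, cstring1
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;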
2018-07-21T05:23:39,851 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
2018-07-21T05:23:39,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [] accepted = [] method = public org.apache.hadoop.hive.serde2.io.DoubleWritable org.apache.hadoop.hive.ql.udf.UDFRand.evaluate()
2018-07-21T05:23:39,860 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity], Column[__druid_extra_partition_key]]
2018-07-21T05:23:39,861 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:23:39,861 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
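Note: per the "Inserted SEL_5, RS_6 and SEL_7" record above and the expressions logged during constant propagation further down (floor_hour over the epoch-millis of _col0, and floor(1.0/rand()) % 6), the optimizer materializes an hour-granularity key plus a pseudo-random shard key and shuffles on both. A rough SQL equivalent, as a sketch only (the real operators work on internal column names, and the mod-6 cap is assumed to come from a targetShardsPerGranularity setting of 6):

    SELECT cast(ctimestamp1 AS timestamp with local time zone) AS `__time`,
           floor_hour(ctimestamp1)                             AS `__time_granularity`,
           floor(1.0 / rand()) % 6                             AS `__druid_extra_partition_key`
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;

RS_6 then distributes and sorts on (__time_granularity, __druid_extra_partition_key), so each reducer receives whole hour-segments' worth of rows in order.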
2018-07-21T05:23:39,861 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:23:39,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
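Note: ConstantPropagate only folds an expression when every function in it is deterministic; rand() arrives here through GenericUDFBridge, so the whole shard-key expression is flagged non-foldable and deferred to runtime, which is what the "undeterministic. Don't evaluate immediately" record above means. A minimal sketch of the contrast:

    -- The first column can be folded to the constant 3 at compile time;
    -- the second contains rand() and must be evaluated per row.
    SELECT 1 + 2 AS foldable,
           floor(1.0 / rand()) % 6 AS not_foldable
    FROM alltypesorc
    LIMIT 1;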
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:23:39,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,863 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:23:39,863 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:23:39,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.519161ms + 0.025054ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:23:39,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:39,879 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:39,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
2018-07-21T05:23:39,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
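Note on how the fetched column stats feed the filter estimate: ctimestamp1 has numNulls: 0 over numRows: 12288, so the estimated selectivity of `ctimestamp1 is not null` is 1 - 0/12288 = 1.0. That is why FIL[4] above is annotated with the same 12288 rows and 2601650 bytes as TS[0] rather than a reduced count.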
2018-07-21T05:23:39,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:23:39,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:23:39,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6]
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7]
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0]
2018-07-21T05:23:39,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,883 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
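Note: the parallelism of 1 chosen for RS[6] above is consistent with the estimated input size: 2,601,650 bytes is far below one reducer's worth of data under hive.exec.reducers.bytes.per.reducer (assumed here to be the default of this era, on the order of 256 MB). Shrinking the target would raise the count; a purely illustrative sketch:

    -- With a 1 MB per-reducer target, the same 2.6 MB estimate would plan
    -- ceil(2601650 / 1000000) = 3 reducers (value chosen only for illustration).
    SET hive.exec.reducers.bytes.per.reducer=1000000;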
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
[a run of empty log.PerfLogger DEBUG entries at 2018-07-21T05:23:39,883 elided; their PERFLOG bodies were stripped in extraction]
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:23:39,883 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
[another run of empty log.PerfLogger DEBUG entries at 2018-07-21T05:23:39,883 elided]
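
Decoded, the component walk above lists each operator of the plan exactly once, sink to source; read in reverse it is the single pipeline (a sketch for orientation, consistent with the traits set at 05:23:39,882-883 and with the optimizer messages later in this log):

    TS[0] -> FIL[4] -> SEL[2] -> SEL[5] -> RS[6] -> SEL[7] -> FS[3]

"Cycle free: true" is TezCompiler confirming this operator graph is acyclic before Tez work generation.
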
2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id) 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:23:39,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: 
tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately. 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6)) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] 
to operator SEL[7] 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key]) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:23:39,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,884 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:39,884 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:23:39,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:39,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,887 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:39,887 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:23:39,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:39,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,889 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0] 2018-07-21T05:23:39,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6] 2018-07-21T05:23:39,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0] 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6] 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7] 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3] 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7] 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7] 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3] 2018-07-21T05:23:39,890 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_partitioned_table 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events. 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:23:39,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization 2018-07-21T05:23:39,891 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled. 
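
For orientation: the null-scan pass above finds nothing because this CTAS reads real rows; the optimization applies to table scans whose input is provably empty. A hypothetical query such as the following (not from this run) would typically qualify, letting the optimizer replace the ORC input paths with an empty scan and turning "Found 0 null table scans" into a hit:

    -- hypothetical illustration: a provably false predicate makes the
    -- scan of alltypesorc a candidate for Hive's null-scan optimization
    SELECT count(*) FROM alltypesorc WHERE 1 = 0
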
2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger 2018-07-21T05:23:39,891 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none 2018-07-21T05:23:39,891 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled. 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null 2018-07-21T05:23:39,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null 2018-07-21T05:23:39,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,892 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation 2018-07-21T05:23:39,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start 2018-07-21T05:23:39,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition 2018-07-21T05:23:39,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false) 2018-07-21T05:23:39,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:Explain, type:string, comment:null)], properties:null) 2018-07-21T05:23:39,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:23:39,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=2, getDatabase_(String, )=14, getTable_(String, String, )=21, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=17, getForeignKeys_(ForeignKeysRequest, )=1} 2018-07-21T05:23:39,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052339_453d6c73-5919-44f8-98cc-dffb6df99017); Time taken: 0.175 seconds 2018-07-21T05:23:39,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query 2018-07-21T05:23:39,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:23:39,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052339_453d6c73-5919-44f8-98cc-dffb6df99017): EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 
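
For readability, the statement under EXPLAIN is reflowed below. The query text is verbatim from the log; the trailing comments on the table properties are added for orientation and describe the Hive/Druid ingestion semantics these keys control:

    CREATE EXTERNAL TABLE druid_partitioned_table
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES (
      "druid.segment.granularity" = "HOUR",              -- one hour of data per Druid segment
      "druid.query.granularity" = "MINUTE",              -- ingested timestamps truncated to the minute
      "druid.segment.targetShardsPerGranularity" = "6"   -- aim for 6 shards per hourly segment
    )
    AS SELECT
      cast(`ctimestamp1` as timestamp with local time zone) as `__time`,
      cstring1, cstring2, cdouble, cfloat, ctinyint,
      csmallint, cint, cbigint, cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL
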
2018-07-21T05:23:39,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT 2018-07-21T05:23:39,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,907 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-5:EXPLAIN] in serial mode 2018-07-21T05:23:39,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,971 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,971 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: CREATETABLE_AS_SELECT 2018-07-21T05:23:39,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,979 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7 2018-07-21T05:23:39,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,980 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:23:39,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:23:39,980 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052339_453d6c73-5919-44f8-98cc-dffb6df99017); Time taken: 0.073 seconds 
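
A plausible reading of "signature checked: 7" from RuntimeStatsPersistenceCheckerHook: one persistence-safe operator signature per operator of the compiled plan (TS, FIL, SEL, SEL, RS, SEL, FS), matching the seven operators in the TezCompiler component walk above.
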
2018-07-21T05:23:39,980 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:23:39,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,980 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:23:39,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result file: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-23-39_730_8859096223375137313-1/-local-10004 2018-07-21T05:23:39,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_730_8859096223375137313-1 2018-07-21T05:23:39,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-23-39_730_8859096223375137313-1 2018-07-21T05:23:39,986 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.251 seconds, Fetched: 80 row(s) 2018-07-21T05:23:39,986 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:23:39,986 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:23:39,987 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:23:39,987 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 
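
The timings are consistent: 0.175 s to compile plus 0.073 s to execute is 0.248 s of the 0.251 s CliDriver reports, leaving roughly 3 ms for result fetch and scratch-dir cleanup; the 80 fetched rows are the lines of the EXPLAIN plan itself (the query's one-column Explain:string schema).
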
2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,989 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6): CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:23:39,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:23:39,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:39,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:23:39,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:23:39,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_partitioned_table position=22 2018-07-21T05:23:39,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:23:39,992 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:23:39,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection 
org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,994 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:39,994 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:23:39,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:39,996 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:39,996 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,996 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:23:39,996 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:23:39,996 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:39,996 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:23:39,996 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,007 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:23:40,007 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:23:40,008 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,008 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:40,008 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:23:40,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:40,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: 
HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse 2018-07-21T05:23:40,009 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1 2018-07-21T05:23:40,012 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:23:40,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:40,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,015 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,015 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,016 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,016 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,018 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,018 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 
1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,020 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:23:40,020 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:23:40,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:23:40,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:23:40,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:23:40,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:23:40,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery: 
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:40,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
[a run of empty log.PerfLogger DEBUG entries between 05:23:40,026 and 05:23:40,050 elided; their PERFLOG bodies were stripped in extraction]
2018-07-21T05:23:40,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
[a further run of empty log.PerfLogger DEBUG entries between 05:23:40,051 and 05:23:40,054 elided]
2018-07-21T05:23:40,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:40,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:40,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:40,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:40,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:23:40,074 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_partitioned_table position=22
2018-07-21T05:23:40,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,075 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:23:40,075 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:23:40,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase:
HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,077 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:40,077 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:23:40,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:40,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,079 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:23:40,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,079 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,079 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:23:40,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:40,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,089 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:23:40,089 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:23:40,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:40,090 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:23:40,090 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:23:40,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:40,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection 
org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:40,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:23:40,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:23:40,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:23:40,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2))
2018-07-21T05:23:40,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:23:40,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:23:40,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:23:40,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:23:40,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:23:40,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003
2018-07-21T05:23:40,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,096 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:40,096 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:40,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:23:40,098 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,099 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:23:40,099 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:23:40,099 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:23:40,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:23:40,100 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:23:40,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:23:40,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
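[Note] The pushdown pass above replaces the original filter FIL[1] with FIL[4] and offers the predicate "ctimestamp1 is not null" to the table scan, so non-qualifying rows are dropped as early as possible; the plan shape stays TS-FIL-SEL-FS. A quick way to reproduce this view from the CLI is an EXPLAIN over the CTAS's SELECT (a sketch; exact output wording varies by Hive version):

    -- Inspect the post-pushdown operator tree for the query this log is compiling:
    EXPLAIN
    SELECT cast(`ctimestamp1` AS timestamp with local time zone) AS `__time`,
           cstring1, cstring2, cdouble, cfloat, ctinyint,
           csmallint, cint, cbigint, cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;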
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [] accepted = [] method = public org.apache.hadoop.hive.serde2.io.DoubleWritable org.apache.hadoop.hive.ql.udf.UDFRand.evaluate()
2018-07-21T05:23:40,101 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
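[Note] The optimizer stage above appends two synthetic columns that drive the Druid segment layout: __time_granularity (the row timestamp floored to the segment granularity, HOUR) and __druid_extra_partition_key (a pseudo-random shard key bounded by druid.segment.targetShardsPerGranularity = 6). Reading the "New column list" entries further down, the expressions are floor_hour(...) over the __time column and floor(1.0 / rand()) % 6. A rough HiveQL sketch of what SEL_5 computes per row (illustrative only, not the optimizer's actual code path; floor_hour and rand are the function names visible in the plan):

    -- Sketch of the two partitioning columns added by SEL_5:
    SELECT
      cast(ctimestamp1 AS timestamp with local time zone) AS `__time`,
      floor_hour(cast(ctimestamp1 AS timestamp))          AS `__time_granularity`,
      floor(1.0 / rand()) % 6                             AS `__druid_extra_partition_key`
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;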
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity], Column[__druid_extra_partition_key]]
2018-07-21T05:23:40,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:23:40,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:23:40,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,105 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:23:40,105 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:23:40,115 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.521227ms + 0.020279ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:23:40,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:40,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
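[Note] The stats annotation that follows works off metastore-backed basic statistics (numRows 12288, dataSize 2601650) plus per-column statistics (NDV, null counts, min/max) fetched by the TAB_COL_STATS query above. For a table that lacks them, such statistics are produced with ANALYZE; a standard HiveQL sketch (not part of this test run):

    -- How statistics like the ones read above are computed and inspected:
    ANALYZE TABLE alltypesorc COMPUTE STATISTICS;              -- basic stats: numRows, dataSize
    ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS;  -- NDV, nulls, min/max per column
    DESCRIBE FORMATTED alltypesorc ctimestamp1;                -- show the stored column stats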
2018-07-21T05:23:40,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
2018-07-21T05:23:40,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
2018-07-21T05:23:40,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:23:40,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6]
2018-07-21T05:23:40,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7]
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3]
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:23:40,121 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:23:40,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:23:40,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:23:40,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,123 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:40,123 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:40,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:40,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:40,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,125 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,125 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:23:40,125 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:23:40,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:23:40,127 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_partitioned_table
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:23:40,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:23:40,128 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
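[Note] At this point the compiler has split the operator pipeline into a two-vertex Tez DAG: Map 1 carries TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6] and Reducer 2 carries SEL[7]-FS[3], connected through the reduce sink. The LLAP decisions that follow come from session configuration; a sketch of settings consistent with these log entries (assumed values, shown for illustration only, not read from this run's hive-site.xml):

    -- Session settings consistent with the decisions logged here (assumed):
    SET hive.execution.engine=tez;       -- TezCompiler builds the Map 1 / Reducer 2 DAG
    SET hive.llap.execution.mode=none;   -- matches "llap mode: none" / "LLAP disabled."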
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:23:40,128 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:23:40,128 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:23:40,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=1, isCompatibleWith_(Configuration, )=1, getDatabase_(String, )=10, getTable_(String, String, )=21, flushCache_()=0, getUniqueConstraints_(UniqueConstraintsRequest, )=0, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=14, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6); Time taken: 0.14 seconds
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,129 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6): CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:23:40,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:23:40,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT
2018-07-21T05:23:40,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:23:40,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default
2018-07-21T05:23:40,132 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_partitioned_table
	at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?]
	at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?]
	at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?]
	at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?]
	at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?]
	at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?]
	at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?]
	at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?]
	at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?]
	at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?]
	at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?]
	at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?]
	at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?]
	at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:23:40,171 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-23-39_989_7156558857288253356-1
2018-07-21T05:23:40,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-23-39_989_7156558857288253356-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:23:40,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:23:40,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:23:40,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:23:40,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6
2018-07-21T05:23:40,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:23:40,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:23:40,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,173 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: CREATE EXTERNAL TABLE druid_partition...NULL (Stage-1)
2018-07-21T05:23:40,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\n\n\nCREATE EXTERNAL TABLE druid_partitioned_table\nSTORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'\nTBLPROPERTIES (\n\"druid.segment.granularity\" = \"HOUR\",\n\"druid.query.granularity\" = \"MINUTE\",\n\"druid.segment.targetShardsPerGranularity\" = \"6\"\n)\nAS\nSELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL"}
2018-07-21T05:23:40,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:23:40,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,176 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:23:40,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,178 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 3.10KB
2018-07-21T05:23:40,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003
2018-07-21T05:23:40,187 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003
2018-07-21T05:23:40,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10002
2018-07-21T05:23:40,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10001
2018-07-21T05:23:40,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,194 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-23-39_989_7156558857288253356-1
2018-07-21T05:23:40,196 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:23:40,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,196 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:23:40,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.58KB
2018-07-21T05:23:40,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:23:40,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=CREATE EXTERNAL TABLE druid_partition...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6 }
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:23:40,285 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:40,285 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741847_1023, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_3.recovery
2018-07-21T05:23:40,316 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_3.recovery for DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:23:40,334 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_3, dagName=CREATE EXTERNAL TABLE druid_partition...NULL (Stage-1)
2018-07-21T05:23:40,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:40,922 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001)
2018-07-21T05:23:40,924 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:23:42,362 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:23:42,382 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0001
2018-07-21T05:23:42,475 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:23:42,475 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:23:42,475 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:23:42,475 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:23:42,476 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:23:42,479 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB
2018-07-21T05:23:42,486 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file
2018-07-21T05:23:42,486 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl
2018-07-21T05:23:42,486 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:23:42,487 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
2018-07-21T05:23:42,708 DEBUG [ContainersLauncher #1] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #1, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:23:43,713 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:23:43,714 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:23:43,946 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:23:46,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:46,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:23:49,232 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:23:49,295 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:23:49,987 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:23:50,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:50,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:50,494 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:51,772 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:51,773 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741848_1024, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/0337780af80d4c699cb6494a54ad9236/0_descriptor.json
2018-07-21T05:23:51,816 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/0337780af80d4c699cb6494a54ad9236/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:23:51,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:51,830 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741849_1025, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/0_index.zip
2018-07-21T05:23:51,843 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/0337780af80d4c699cb6494a54ad9236/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:51,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:23:51,872 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:51,872 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741850_1026, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700.json
2018-07-21T05:23:51,883 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,205 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,205 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,205 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:52,205 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,206 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,206 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,206 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,206 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:23:52,206 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,206 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741851_1027, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/79eb018cab744d3e88c6560e273ed46b/1_descriptor.json
2018-07-21T05:23:52,215 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/79eb018cab744d3e88c6560e273ed46b/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,220 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,220 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741852_1028, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/1_index.zip
2018-07-21T05:23:52,229 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/79eb018cab744d3e88c6560e273ed46b/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:23:52,239 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:52,240 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741853_1029, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_1.json
2018-07-21T05:23:52,249 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,322 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741854_1030, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/783773c68a4841569d80230e68eabe95/2_descriptor.json
2018-07-21T05:23:52,330 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/783773c68a4841569d80230e68eabe95/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:52,335 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:52,335 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741855_1031, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/2_index.zip
2018-07-21T05:23:52,343 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/783773c68a4841569d80230e68eabe95/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:52,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:52,352 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741856_1032, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_2.json
2018-07-21T05:23:52,360 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,428 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:52,428 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741857_1033, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/15c3abd9a21e4843b6d43d4a66a500bb/3_descriptor.json
2018-07-21T05:23:52,438 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/15c3abd9a21e4843b6d43d4a66a500bb/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:52,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:52,444 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741858_1034, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/3_index.zip
2018-07-21T05:23:52,452 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/15c3abd9a21e4843b6d43d4a66a500bb/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:52,460 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,460 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741859_1035, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_3.json
2018-07-21T05:23:52,468 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:52,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,534 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741860_1036, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/36a66c1ae9964c1480b263e479198cf0/4_descriptor.json
2018-07-21T05:23:52,545 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/36a66c1ae9964c1480b263e479198cf0/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:23:52,557 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:52,557 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741861_1037, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/4_index.zip
2018-07-21T05:23:52,566 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/36a66c1ae9964c1480b263e479198cf0/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:52,580 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:23:52,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:52,581 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741862_1038, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_4.json 2018-07-21T05:23:52,590 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:23:52,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:52,655 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741863_1039, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/d9ef7602f0894e4b9a0659a6b63c884a/5_descriptor.json 2018-07-21T05:23:52,664 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/d9ef7602f0894e4b9a0659a6b63c884a/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:23:52,669 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:52,669 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741864_1040, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/5_index.zip 2018-07-21T05:23:52,677 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/d9ef7602f0894e4b9a0659a6b63c884a/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31 2018-07-21T05:23:52,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:23:52,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:52,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,685 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:23:52,685 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:23:52,685 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:23:52,685 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:52,685 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741865_1041, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_5.json 2018-07-21T05:23:52,693 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052340.096-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:23:52,775 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:52,776 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741866_1042, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/33c6c029aad74750a322316e33f01697/0_descriptor.json 2018-07-21T05:23:52,789 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/33c6c029aad74750a322316e33f01697/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31 2018-07-21T05:23:52,795 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,795 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:23:52,795 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:23:52,795 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:52,796 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,796 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,796 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:23:52,796 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:52,796 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741867_1043, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/0_index.zip 2018-07-21T05:23:52,807 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/33c6c029aad74750a322316e33f01697/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31 2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
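Each BLOCK* allocate / DIR* completeFile pair above is one client-side write cycle: the NameNode assigns a block with three replicas when the DFSClient starts writing, and records completeFile when the output stream is closed. A hedged sketch of that cycle against the standard Hadoop FileSystem API (the file path is illustrative, not one of the paths above):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class HdfsWriteSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:35925"); // the NameNode in this log
            try (FileSystem fs = FileSystem.get(conf)) {
                Path p = new Path("/tmp/example_descriptor.json");
                // create() is what drives the NameNode's "BLOCK* allocate" records...
                try (FSDataOutputStream out = fs.create(p, true)) {
                    out.writeBytes("{}");
                } // ...and close() drives "DIR* completeFile".
            }
        }
    }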
2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:23:52,814 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:52,815 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741868_1044, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700.json
2018-07-21T05:23:52,822 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:53,076 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,077 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741869_1045, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/7f085bf1ced64e3c8bc5148a41d92fdc/1_index.zip
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:53,101 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,101 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741870_1046, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/7f085bf1ced64e3c8bc5148a41d92fdc/1_descriptor.json
2018-07-21T05:23:53,146 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/7f085bf1ced64e3c8bc5148a41d92fdc/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,169 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/7f085bf1ced64e3c8bc5148a41d92fdc/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:23:53,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,177 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741871_1047, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_1.json
2018-07-21T05:23:53,193 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,353 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:53,354 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,354 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741872_1048, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/c57eaf3e78244dbc9204c8db704d6052/2_descriptor.json
2018-07-21T05:23:53,362 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/c57eaf3e78244dbc9204c8db704d6052/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,368 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,368 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,368 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:53,368 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:53,368 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,368 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,369 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:23:53,369 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,369 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741873_1049, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/2_index.zip
2018-07-21T05:23:53,381 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/c57eaf3e78244dbc9204c8db704d6052/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,393 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,393 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,393 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:53,393 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,393 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741874_1050, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_2.json
2018-07-21T05:23:53,405 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,493 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:53,493 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741875_1051, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/fcc8d253bc154ad08c0669df60b6782f/3_descriptor.json
2018-07-21T05:23:53,505 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/fcc8d253bc154ad08c0669df60b6782f/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,511 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741876_1052, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/3_index.zip
2018-07-21T05:23:53,515 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:23:53,526 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/fcc8d253bc154ad08c0669df60b6782f/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:53,536 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,536 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741877_1053, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_3.json
2018-07-21T05:23:53,553 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:23:53,640 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,640 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741878_1054, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/377a08d281d64ed4930cdb4a2a19e1c6/4_descriptor.json
2018-07-21T05:23:53,651 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/377a08d281d64ed4930cdb4a2a19e1c6/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,658 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,658 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,658 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:23:53,658 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:23:53,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,659 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741879_1055, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/4_index.zip
2018-07-21T05:23:53,671 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/377a08d281d64ed4930cdb4a2a19e1c6/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,683 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:53,684 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:23:53,684 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741880_1056, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_4.json
2018-07-21T05:23:53,694 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
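The file names landing under segmentsDescriptorDir above follow a consistent pattern: datasource, interval start, interval end, segment version, and a shard suffix when the shard number is non-zero, with ':' stripped so the timestamps are filesystem-safe. A hypothetical helper reproducing that naming, inferred purely from the paths in this log rather than taken from the Druid or Hive source:

    public final class DescriptorNameSketch {
        // e.g. default.druid_partitioned_table_1970-01-01T000000.000Z_..._4.json
        static String descriptorName(String dataSource, String start, String end,
                                     String version, int shardNum) {
            String base = String.join("_", dataSource, clean(start), clean(end), clean(version));
            return (shardNum > 0 ? base + "_" + shardNum : base) + ".json";
        }

        private static String clean(String s) {
            return s.replace(":", ""); // 05:23:40 -> 052340, matching the log paths
        }

        public static void main(String[] args) {
            System.out.println(descriptorName("default.druid_partitioned_table",
                    "1970-01-01T00:00:00.000Z", "1970-01-01T01:00:00.000Z",
                    "2018-07-21T05:23:40.096-07:00", 4));
        }
    }

Note that the shard-0 descriptor earlier in the log (..._2018-07-21T052340.096-0700.json) carries no numeric suffix, which is why the helper treats shard 0 specially.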
2018-07-21T05:23:53,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:53,755 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741881_1057, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/e57ea21aed2846aa9e477bf56fa28e80/5_descriptor.json
2018-07-21T05:23:53,764 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/e57ea21aed2846aa9e477bf56fa28e80/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:23:53,774 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:53,774 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741882_1058, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/5_index.zip
2018-07-21T05:23:53,782 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/intermediateSegmentDir/default.druid_partitioned_table/e57ea21aed2846aa9e477bf56fa28e80/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:23:53,788 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,788 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741883_1059, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_5.json
2018-07-21T05:23:53,797 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052340.096-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,817 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,817 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,817 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:23:53,817 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:23:53,817 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:23:53,817 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:23:53,818 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:23:53,818 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:23:53,818 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:23:53,818 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741884_1060, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003/tmpstats-0_FS_3
2018-07-21T05:23:53,826 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003/tmpstats-0_FS_3 is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1913785004_31
2018-07-21T05:23:53,847 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_3.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:23:53,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:53,851 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1
2018-07-21T05:23:53,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:53,855 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode
2018-07-21T05:23:53,855 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode
2018-07-21T05:23:53,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@6111aa49, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:23:53,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:23:53,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:53,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:23:53,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:23:53,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:23:53,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:23:53,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:53,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:23:53,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: MoveTask moving hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10002 to hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table
2018-07-21T05:23:53,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:53,858 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Moving data to directory hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table from hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10002
2018-07-21T05:23:53,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:23:53,866 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode
2018-07-21T05:23:53,867 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_partitioned_table
2018-07-21T05:23:53,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.druid_partitioned_table on null
2018-07-21T05:23:53,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:23:53,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:23:53,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:53,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:23:53,871 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
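The MoveTask records above relocate the job's staging output (-ext-10002) into the final warehouse directory. On HDFS that move is a metadata-only rename; a minimal sketch with the Hadoop FileSystem API (paths copied from the log records above, error handling reduced to a single check):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class MoveTaskSketch {
        public static void main(String[] args) throws Exception {
            Path src = new Path("hdfs://localhost:35925/build/ql/test/data/warehouse/"
                    + ".hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10002");
            Path dst = new Path("hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table");
            FileSystem fs = src.getFileSystem(new Configuration());
            if (!fs.rename(src, dst)) { // rename moves namespace entries, not bytes, on HDFS
                throw new IllegalStateException("move failed: " + src + " -> " + dst);
            }
        }
    }

Hive's actual MoveTask does considerably more (existence checks, trash handling, inter-filesystem copies); the sketch shows only the happy path visible in these two records.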
2018-07-21T05:23:53,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:53,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:53,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:53,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:53,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:23:53,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:23:53,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:23:53,876 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:23:53,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:23:53,878 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
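The DruidStorageHandler record above wires its SQL connector to a Derby network server at jdbc:derby://localhost:1527/... Reaching that metadata store directly takes nothing more than JDBC with the Derby client driver (derbyclient.jar) on the classpath; a hedged sketch with a placeholder database path, querying the druid_segments table that the next record reports as already existing:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public final class DerbyMetadataSketch {
        public static void main(String[] args) throws Exception {
            // URI shape taken from the log; substitute the real on-disk metadata.db path
            String uri = "jdbc:derby://localhost:1527//path/to/druid_derby/metadata.db";
            try (Connection conn = DriverManager.getConnection(uri);
                 Statement st = conn.createStatement();
                 ResultSet rs = st.executeQuery("SELECT COUNT(*) FROM druid_segments")) {
                if (rs.next()) {
                    System.out.println("segment rows: " + rs.getLong(1));
                }
            }
        }
    }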
2018-07-21T05:23:53,893 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.SQLMetadataConnector: Table[druid_segments] already exists 2018-07-21T05:23:53,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: pre-create data source with name default.druid_partitioned_table 2018-07-21T05:23:53,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:53,898 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: create_table: Table(tableName:druid_partitioned_table, dbName:default, owner:hive_test_user, createTime:1532175833, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.druid.serde.DruidSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{druid.segment.granularity=HOUR, external.table.purge=true, EXTERNAL=TRUE, bucketing_version=2, druid.query.granularity=MINUTE, druid.segment.targetShardsPerGranularity=6, druid.datasource=default.druid_partitioned_table, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER) 2018-07-21T05:23:53,899 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=create_table: Table(tableName:druid_partitioned_table, dbName:default, owner:hive_test_user, createTime:1532175833, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, 
comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.druid.serde.DruidSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{druid.segment.granularity=HOUR, external.table.purge=true, EXTERNAL=TRUE, bucketing_version=2, druid.query.granularity=MINUTE, druid.segment.targetShardsPerGranularity=6, druid.datasource=default.druid_partitioned_table, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER) 2018-07-21T05:23:53,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:23:53,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.MetaStoreUtils: Updating table stats for druid_partitioned_table 2018-07-21T05:23:53,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.MetaStoreUtils: Updated size of table druid_partitioned_table to 0 2018-07-21T05:23:53,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:53,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:23:53,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:23:53,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: commit insert into table druid_partitioned_table overwrite false 2018-07-21T05:23:53,971 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Moving [12] Druid segments from staging directory [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6] to Deep storage [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage] 2018-07-21T05:23:53,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
fs.FileSystem: Looking for FS supporting hdfs 2018-07-21T05:23:53,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: looking for configuration option fs.hdfs.impl 2018-07-21T05:23:53,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:23:53,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem 2018-07-21T05:23:53,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] retry.RetryUtils: multipleLinearRandomRetry = null 2018-07-21T05:23:53,972 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hdfs.HdfsDataSegmentPusher: Configured HDFS as deep storage 2018-07-21T05:23:53,973 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Building timeline for umbrella Interval [1969-12-31T23:00:00.000Z/1970-01-01T01:00:00.000Z] 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:23:53,985 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:53,985 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741885_1061, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:53,995 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
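The "Moving [12] Druid segments" step above relocates the staging output under the configured deep-storage root on HDFS. A rough sketch of that kind of move with the Hadoop FileSystem API — illustrative paths only, and not the handler's actual code path, which also rewrites each segment's descriptor.json (the files the block allocations below are writing):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MoveToDeepStorage {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical stand-ins for the staging and deep-storage dirs in the log.
    Path staging = new Path("/tmp/druidStagingDir/.staging-example");
    Path deepStorage = new Path("/tmp/druid-data/deep-storage/example");
    FileSystem fs = staging.getFileSystem(conf);
    // A plain rename within one filesystem; the real commit moves per segment.
    if (!fs.rename(staging, deepStorage)) {
      throw new IllegalStateException("rename failed: " + staging + " -> " + deepStorage);
    }
  }
}
```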
2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:23:54,012 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:54,012 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741886_1062, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,025 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:23:54,034 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:54,034 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741887_1063, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,044 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:23:54,052 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:54,053 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741888_1064, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,061 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,069 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,069 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:23:54,069 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:23:54,069 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:23:54,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:23:54,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:23:54,070 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741889_1065, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,080 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
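The net.NetworkTopology DEBUG chatter around each block allocation is the NameNode's random replica placement retrying around datanodes it has already chosen. A toy version of that retry shape — not HDFS's BlockPlacementPolicyDefault itself, just the loop these messages trace:

```java
import java.util.List;
import java.util.Random;
import java.util.Set;

public class ChooseRandomSketch {
  // Pick uniformly from the candidates, skipping excluded datanodes
  // (analogous to "Node ... is excluded, continuing." above) and giving
  // up after a bound (analogous to "No node to choose.").
  static String chooseRandom(List<String> nodes, Set<String> excluded, Random rng) {
    int attempts = 10 * nodes.size(); // crude retry bound, purely illustrative
    while (attempts-- > 0) {
      String pick = nodes.get(rng.nextInt(nodes.size()));
      if (excluded.contains(pick)) {
        continue; // excluded, retry with another random pick
      }
      return pick;
    }
    return null; // nothing choosable
  }
}
```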
2018-07-21T05:23:54,091 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:54,091 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741890_1066, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,100 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,108 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,108 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:23:54,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:54,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:23:54,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,109 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741891_1067, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,118 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:23:54,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,126 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741892_1068, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,135 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:23:54,143 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:54,143 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741893_1069, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,151 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:23:54,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:23:54,162 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741894_1070, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,178 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
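Each completeFile above closes one per-segment descriptor.json, laid out under deep storage as datasource/interval/version. A small helper showing that path shape, with the interval and version copied from the log (the root below is hypothetical shorthand for the qtest tmp directory):

```java
public class DeepStoragePaths {
  // Layout visible in the block allocations above:
  //   <deepStorage>/<dataSource>/<start>_<end>/<version>/descriptor.json
  // (partition shards are distinguished by a _N suffix on the segment id).
  static String descriptorPath(String deepStorage, String dataSource,
                               String interval, String version) {
    return String.join("/", deepStorage, dataSource, interval, version, "descriptor.json");
  }

  public static void main(String[] args) {
    System.out.println(descriptorPath(
        "/tmp/druid-data/deep-storage",                    // hypothetical root
        "default.druid_partitioned_table",
        "19700101T000000.000Z_19700101T010000.000Z",
        "2018-07-21T05_23_40.096-07_00"));
  }
}
```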
2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:23:54,189 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,189 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741895_1071, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,202 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,213 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,213 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:23:54,213 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:23:54,213 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:23:54,214 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:23:54,214 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
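Once the descriptors are written, the handler publishes the twelve segment ids and polls the coordinator until each is loaded, as the "Published ..." and "Checking segment ..." entries below show: a 204/empty response means the coordinator has not registered the segment yet, and the log re-checks roughly 30 seconds later. A minimal polling sketch under those assumptions, using Java 11's java.net.http against the endpoint shape seen below:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

public class WaitForSegment {
  public static void main(String[] args) throws Exception {
    // Segment id passed as args[0], e.g. one of the ids in the
    // "Published ..." entries below.
    String url = "http://localhost:8081/druid/coordinator/v1/datasources/"
        + "default.druid_partitioned_table/segments/" + args[0];
    HttpClient client = HttpClient.newHttpClient();
    HttpRequest req = HttpRequest.newBuilder(URI.create(url)).GET().build();
    while (true) {
      HttpResponse<String> resp = client.send(req, HttpResponse.BodyHandlers.ofString());
      // 200 with a body means the coordinator knows the segment; 204 means retry.
      if (resp.statusCode() == 200 && !resp.body().isEmpty()) {
        System.out.println("segment loaded: " + resp.body());
        return;
      }
      Thread.sleep(Duration.ofSeconds(30).toMillis()); // matches the ~30s re-check cadence
    }
  }
}
```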
2018-07-21T05:23:54,214 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:23:54,214 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:23:54,214 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741896_1072, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:23:54,223 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:23:54,225 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00 2018-07-21T05:23:54,225 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1 2018-07-21T05:23:54,225 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2 2018-07-21T05:23:54,226 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3 2018-07-21T05:23:54,226 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4 2018-07-21T05:23:54,226 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5 2018-07-21T05:23:54,226 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00 2018-07-21T05:23:54,227 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1 2018-07-21T05:23:54,227 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2 2018-07-21T05:23:54,227 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3 2018-07-21T05:23:54,227 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: 
Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4 2018-07-21T05:23:54,228 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5 2018-07-21T05:23:54,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: checking load status from coordinator localhost:8081 2018-07-21T05:23:54,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/status] starting 2018-07-21T05:23:54,242 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,243 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/status] Got response: 200 OK 2018-07-21T05:23:54,243 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@37a7f116 2018-07-21T05:23:54,243 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 408B, last=false 2018-07-21T05:23:54,243 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf 2018-07-21T05:23:54,243 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 0B, last=true 2018-07-21T05:23:54,247 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Waiting for the loading of [12] segments 2018-07-21T05:23:54,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting 2018-07-21T05:23:54,249 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,249 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content 2018-07-21T05:23:54,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is [] 2018-07-21T05:23:54,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: 
[GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting 2018-07-21T05:23:54,254 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,254 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content 2018-07-21T05:23:54,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is [] 2018-07-21T05:23:54,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting 2018-07-21T05:23:54,258 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,258 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content 2018-07-21T05:23:54,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is [] 2018-07-21T05:23:54,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting 2018-07-21T05:23:54,264 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 
Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,264 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content 2018-07-21T05:23:54,265 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is [] 2018-07-21T05:23:54,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting 2018-07-21T05:23:54,270 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,270 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content 2018-07-21T05:23:54,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is [] 2018-07-21T05:23:54,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting 2018-07-21T05:23:54,274 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,274 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content 2018-07-21T05:23:54,274 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is [] 2018-07-21T05:23:54,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting 2018-07-21T05:23:54,278 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,278 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content 2018-07-21T05:23:54,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is [] 2018-07-21T05:23:54,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting 2018-07-21T05:23:54,287 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,287 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content 2018-07-21T05:23:54,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is [] 2018-07-21T05:23:54,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting 2018-07-21T05:23:54,299 DEBUG [HttpClient-Netty-Worker-0] 
client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,299 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content 2018-07-21T05:23:54,301 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is [] 2018-07-21T05:23:54,301 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting 2018-07-21T05:23:54,307 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,307 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content 2018-07-21T05:23:54,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is [] 2018-07-21T05:23:54,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting 2018-07-21T05:23:54,311 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:23:54,311 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:23:54,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:23:54,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:23:54,314 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:23:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:23:54,314 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:23:54,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:24:02,598 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:24:02,624 WARN [ContainersLauncher #1] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0001_01_000004 is : 143
2018-07-21T05:24:02,626 DEBUG [ContainersLauncher #1] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #1, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:24:04,766 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,767 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,767 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,767 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,768 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,771 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,773 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,774 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,773 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,774 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,773 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:04,780 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:24:19,233 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:24:19,295 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:24:24,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:24:24,317 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,318 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:24:24,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:24:24,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:24:24,322 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,322 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:24:24,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:24:24,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:24:24,325 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,325 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:24:24,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:24:24,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:24:24,329 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,329 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:24:24,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:24:24,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:24:24,332 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,332 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:24:24,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:24:24,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:24:24,335 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,335 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:24:24,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:24:24,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:24:24,338 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,338 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:24:24,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:24:24,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:24:24,342 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,342 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:24:24,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:24:24,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:24:24,345 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,345 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:24:24,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:24:24,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:24:24,348 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,348 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:24:24,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:24:24,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:24:24,351 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,352 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:24:24,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:24:24,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:24:24,355 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:24,355 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:24:24,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:24:49,233 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:24:49,296 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:24:54,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:24:54,359 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,359 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:24:54,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:24:54,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:24:54,364 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,364 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:24:54,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:24:54,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:24:54,368 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,368 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:24:54,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:24:54,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:24:54,372 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,372 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:24:54,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:24:54,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:24:54,376 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,376 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:24:54,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:24:54,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:24:54,385 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,385 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:24:54,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:24:54,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:24:54,390 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,390 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:24:54,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:24:54,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:24:54,406 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,406 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:24:54,409 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:24:54,413 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:24:54,415 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,415 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:24:54,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:24:54,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:24:54,420 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,420 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:24:54,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:24:54,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:24:54,424 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,424 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:24:54,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:24:54,426 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:24:54,428 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:24:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:24:54,428 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:24:54,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:25:19,233 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:25:19,296 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:25:24,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:25:24,432 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,432 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:25:24,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:25:24,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:25:24,436 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,436 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:25:24,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:25:24,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:25:24,441 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,441 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:25:24,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:25:24,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:25:24,445 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,445 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:25:24,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:25:24,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:25:24,448 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,448 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:25:24,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:25:24,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:25:24,451 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,451 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:25:24,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:25:24,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:25:24,454 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,454 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:25:24,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:25:24,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:25:24,458 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,458 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:25:24,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:25:24,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:25:24,460 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,460 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:25:24,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:25:24,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:25:24,462 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,462 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:25:24,464 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:25:24,464 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:25:24,471 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,471 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:25:24,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:25:24,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:25:24,484 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:24 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:24,484 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:25:24,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:25:49,233 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:25:49,296 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:25:54,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:25:54,487 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,487 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:25:54,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:25:54,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:25:54,492 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,492 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:25:54,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:25:54,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting
2018-07-21T05:25:54,497 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,497 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content
2018-07-21T05:25:54,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is []
2018-07-21T05:25:54,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting
2018-07-21T05:25:54,501 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,501 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content
2018-07-21T05:25:54,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is []
2018-07-21T05:25:54,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:25:54,505 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,506 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:25:54,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:25:54,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] starting
2018-07-21T05:25:54,508 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,508 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] Got response: 204 No Content
2018-07-21T05:25:54,509 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00] response is []
2018-07-21T05:25:54,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] starting
2018-07-21T05:25:54,512 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,512 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] Got response: 204 No Content
2018-07-21T05:25:54,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_3] response is []
2018-07-21T05:25:54,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] starting
2018-07-21T05:25:54,519 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,519 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] Got response: 204 No Content
2018-07-21T05:25:54,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_1] response is []
2018-07-21T05:25:54,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:25:54,531 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,531 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:25:54,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:25:54,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] starting
2018-07-21T05:25:54,535 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:25:54,535 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] Got response: 204 No Content
2018-07-21T05:25:54,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_2] response is []
2018-07-21T05:25:54,537 DEBUG
[ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] starting 2018-07-21T05:25:54,539 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:25:54,539 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] Got response: 204 No Content 2018-07-21T05:25:54,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_5] response is [] 2018-07-21T05:25:54,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] starting 2018-07-21T05:25:54,546 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:25:54 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:25:54,546 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] Got response: 204 No Content 2018-07-21T05:25:54,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_4] response is [] 2018-07-21T05:26:19,234 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:26:19,297 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:26:24,547 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Wait time exhausted and we have [12] out of [12] segments not loaded yet 2018-07-21T05:26:24,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:24,565 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
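Each segment check above is a GET against the Coordinator's segment-metadata endpoint; a 204 No Content ("response is []") means that segment is not yet published, and once the passive-wait budget runs out the handler logs the "Wait time exhausted" ERROR with all 12 segments still unloaded. A minimal sketch of such a poll loop, using only what the URLs above show (illustrative Java 11 code, not Hive's actual DruidStorageHandler implementation):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Illustrative only: polls the Druid Coordinator segment-metadata endpoint the
// way the log above does; class and method names here are hypothetical.
public class SegmentLoadPoller {
  private static final String COORDINATOR = "http://localhost:8081";

  /** Returns true once the Coordinator serves metadata for the segment. */
  static boolean waitForSegment(String dataSource, String segmentId, long timeoutMs)
      throws Exception {
    HttpClient client = HttpClient.newHttpClient();
    String url = COORDINATOR + "/druid/coordinator/v1/datasources/"
        + dataSource + "/segments/" + segmentId;
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
      HttpResponse<String> resp = client.send(
          HttpRequest.newBuilder(URI.create(url)).GET().build(),
          HttpResponse.BodyHandlers.ofString());
      // The log above shows 204 No Content / "response is []" while a segment
      // is still unpublished; a 200 with a JSON body means it is available.
      if (resp.statusCode() == 200 && !resp.body().isEmpty()) {
        return true;
      }
      Thread.sleep(1_000); // passive wait between polls
    }
    return false; // "Wait time exhausted", as in the ERROR above
  }
}
```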
2018-07-21T05:26:24,565 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,565 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:24,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@druid_partitioned_table) Type=TABLE WriteType=DDL_NO_LOCK because WriteEntity(default@druid_partitioned_table) Type=TABLE WriteType=DDL_NO_LOCK is present
2018-07-21T05:26:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-3:STATS] in serial mode
2018-07-21T05:26:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@6ed256b2, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:26:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:26:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:26:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:26:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:26:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:26:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:26:24,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:26:24,579 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:26:24,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:26:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,582 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:26:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,582 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,582 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,591 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Executing stats task
2018-07-21T05:26:24,592 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003
2018-07-21T05:26:24,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003
2018-07-21T05:26:24,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Part ID: default.druid_partitioned_table/, numRows
2018-07-21T05:26:24,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Read stats for default.druid_partitioned_table/, numRows, 9173:
2018-07-21T05:26:24,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Part ID: default.druid_partitioned_table/, rawDataSize
2018-07-21T05:26:24,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Read stats for default.druid_partitioned_table/, rawDataSize, 0:
2018-07-21T05:26:24,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:24,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:24,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,607 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.druid_partitioned_table newtbl=druid_partitioned_table
2018-07-21T05:26:24,607 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.druid_partitioned_table newtbl=druid_partitioned_table
2018-07-21T05:26:24,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Table default.druid_partitioned_table stats: [numFiles=0, numRows=9173, totalSize=0, rawDataSize=0, numFilesErasureCoded=0]
2018-07-21T05:26:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsAggregator: About to delete stats tmp dir :hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1/-ext-10003
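The stats task above rebuilds table-level stats from the per-writer records that FSStatsPublisher left under the staging -ext-10003 directory: each writer published partial counts keyed by partition prefix and stat type, BasicStatsTask sums them (numRows=9173, rawDataSize=0) and persists the result via alter_table, and FSStatsAggregator then deletes the tmp dir. A simplified, self-contained sketch of that publish/aggregate/cleanup round trip (illustrative file layout and record format, not Hive's actual serialization):

```java
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

// Illustrative sketch of FS-based stats collection: writers publish
// "partitionPrefix,statType=value" records into a tmp dir; the stats task
// sums them per stat type, then the tmp dir is removed.
public class FsStatsRoundTrip {
  public static void main(String[] args) throws IOException {
    Path tmp = Files.createTempDirectory("ext-10003");
    // publish: two writers report partial row counts for the same prefix
    Files.writeString(tmp.resolve("task_0"), "default.druid_partitioned_table/,numRows=5000\n");
    Files.writeString(tmp.resolve("task_1"), "default.druid_partitioned_table/,numRows=4173\n");

    // aggregate: sum every numRows record for the prefix
    long numRows = 0;
    try (DirectoryStream<Path> files = Files.newDirectoryStream(tmp)) {
      for (Path f : files) {
        for (String line : Files.readAllLines(f)) {
          if (line.contains(",numRows=")) {
            numRows += Long.parseLong(line.substring(line.indexOf('=') + 1).trim());
          }
        }
      }
    }
    System.out.println("numRows=" + numRows); // 9173, as in the stats line above

    // cleanup, mirroring "About to delete stats tmp dir"
    try (DirectoryStream<Path> files = Files.newDirectoryStream(tmp)) {
      for (Path f : files) {
        Files.delete(f);
      }
    }
    Files.delete(tmp);
  }
}
```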
2018-07-21T05:26:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: CREATETABLE_AS_SELECT
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: database:default
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@druid_partitioned_table
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.__time EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
2018-07-21T05:26:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_partitioned_table.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
2018-07-21T05:26:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,640 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7
2018-07-21T05:26:24,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:26:24,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=24, getTable_(String, String, )=9}
2018-07-21T05:26:24,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052339_d102433d-2093-465f-a55b-831e6c9c43d6); Time taken: 164.511 seconds
2018-07-21T05:26:24,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:26:24,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:24,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE", "druid.segment.targetShardsPerGranularity" = "6" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:26:24,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-23-39_989_7156558857288253356-1
2018-07-21T05:26:24,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-23-39_989_7156558857288253356-1
2018-07-21T05:26:24,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 164.652 seconds
2018-07-21T05:26:24,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:26:24,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
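That completes the CTAS: 164.5 seconds end to end, almost all of it the segment-load wait rather than the write itself. Against a standalone HiveServer2 the same statement (copied verbatim from the POSTHOOK line above) could be submitted over JDBC; the endpoint and credentials below are hypothetical, since this test drives Hive in-process:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Submits the CTAS from the log over JDBC. Requires hive-jdbc on the
// classpath; the jdbc:hive2 URL and user are assumptions, the statement
// text is taken from the POSTHOOK line above.
public class DruidCtas {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection(
             "jdbc:hive2://localhost:10000/default", "hiveptest", "");
         Statement stmt = conn.createStatement()) {
      stmt.execute(
          "CREATE EXTERNAL TABLE druid_partitioned_table "
        + "STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' "
        + "TBLPROPERTIES ("
        + "\"druid.segment.granularity\" = \"HOUR\", "
        + "\"druid.query.granularity\" = \"MINUTE\", "
        + "\"druid.segment.targetShardsPerGranularity\" = \"6\") "
        + "AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, "
        + "cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, "
        + "cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL");
    }
  }
}
```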
2018-07-21T05:26:24,643 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:26:24,643 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:26:24,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:26:24,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:26:24,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052624_4e83e3ca-e2d9-4f4a-9787-a361167d234a): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:26:24,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:26:24,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:26:24,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,646 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:24,646 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:26:24,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,647 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:26:24,647 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:24,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,647 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,647 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,656 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:24,656 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:24,659 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1
2018-07-21T05:26:24,659 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:26:24,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:24,667 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,667 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,667 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,668 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,669 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,669 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,670 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,670 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table
2018-07-21T05:26:24,670 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table
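The get_primary_keys / get_unique_constraints / get_foreign_keys calls above are the planner pulling declared constraints for cost-based optimization; on this CTAS-created Druid table they all come back empty. Roughly equivalent client-side calls look like the sketch below; the request-class constructors are quoted from memory of the Hive 3.x metastore API and should be treated as assumptions:

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;

// Sketch of the constraint lookups logged above; signatures may need
// adjusting against the exact Hive version on the classpath.
public class ConstraintLookup {
  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
    // get_primary_keys : tbl=hive.default.druid_partitioned_table
    System.out.println(client.getPrimaryKeys(
        new PrimaryKeysRequest("default", "druid_partitioned_table")));
    // get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=...
    System.out.println(client.getForeignKeys(
        new ForeignKeysRequest(null, null, "default", "druid_partitioned_table")));
    client.close();
  }
}
```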
2018-07-21T05:26:24,670 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,670 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,705 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,705 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:24,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:24,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:24,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:24,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:24,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:24,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,769 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,770 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:26:24,770 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:26:24,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,776 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user.
Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:24,776 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,776 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:24,776 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:24,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user.
Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:24,789 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,791 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,791 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,797 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,810 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:24,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after top-level introduceDerivedTable
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:24,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:24,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:24,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:24,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(_c0=[$0], _c1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:24,814 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:24,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,814 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,814 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:24,822 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:24,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:24,823 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:24,823 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:24,825 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1
2018-07-21T05:26:24,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:26:24,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for druid_partitioned_table TS[0]
2018-07-21T05:26:24,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:26:24,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (. (tok_table_or_col druid_partitioned_table) $f0) _c0) (tok_selexpr (. (tok_table_or_col druid_partitioned_table) $f1) _c1))
2018-07-21T05:26:24,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:26:24,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:26:24,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:26:24,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-24_645_5897888312578008068-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001
2018-07-21T05:26:24,828 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-24_645_5897888312578008068-1
2018-07-21T05:26:24,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-24_645_5897888312578008068-1/-ext-10003
2018-07-21T05:26:24,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001 row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:26:24,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:26:24,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:24,830 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
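CBO has folded the whole aggregation into the DruidQuery node itself (aggs=[[sum($7), sum($8)]] over cint and cbigint), so Druid does the summing and Hive only deserializes two bigint columns, $f0 and $f1, as the DruidSerDe lines confirm. In spirit, the pushed-down query is a Druid timeseries query like the JSON below (illustrative; the exact payload Hive stores in the scan's druid.query.json property may differ in detail):

```java
// Illustrative shape of the query such a DruidQuery scan sends to the broker;
// field names $f0/$f1 match the DruidSerDe columns in the log, the intervals
// match the plan above. Java 13+ text block.
public class PushedDownDruidQuery {
  static final String QUERY = """
      {
        "queryType": "timeseries",
        "dataSource": "default.druid_partitioned_table",
        "granularity": "all",
        "intervals": ["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],
        "aggregations": [
          {"type": "longSum", "name": "$f0", "fieldName": "cint"},
          {"type": "longSum", "name": "$f1", "fieldName": "cbigint"}
        ]
      }
      """;

  public static void main(String[] args) {
    System.out.println(QUERY);
  }
}
```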
2018-07-21T05:26:24,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-SEL[1]-FS[2]
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(2)
2018-07-21T05:26:24,831 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(1)
2018-07-21T05:26:24,831 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-SEL[1]-FS[2]
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-SEL[1]-LIST_SINK[3]
2018-07-21T05:26:24,848 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:26:24,848 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:26:24,848 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:26:24,848 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:26:24,848 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:24,848 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:$f0, type:bigint, comment:null), FieldSchema(name:$f1, type:bigint, comment:null)], properties:null)
2018-07-21T05:26:24,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:26:24,852 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing operator TS[0]
2018-07-21T05:26:24,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS
2018-07-21T05:26:24,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Operator 0 TS initialized
2018-07-21T05:26:24,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing children of 0 TS
2018-07-21T05:26:24,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing child 1 SEL
2018-07-21T05:26:24,853 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing operator SEL[1]
2018-07-21T05:26:24,858 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: SELECT struct<$f0:bigint,$f1:bigint>
2018-07-21T05:26:24,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL
DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Operator 1 SEL initialized 2018-07-21T05:26:24,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing children of 1 SEL 2018-07-21T05:26:24,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing child 3 LIST_SINK 2018-07-21T05:26:24,858 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing operator LIST_SINK[3] 2018-07-21T05:26:24,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK 2018-07-21T05:26:24,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Operator 3 LIST_SINK initialized 2018-07-21T05:26:24,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK done is reset. 2018-07-21T05:26:24,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL done is reset. 2018-07-21T05:26:24,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS done is reset. 2018-07-21T05:26:24,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=12, getTable_(String, String, )=18, flushCache_()=0, getAllDatabases_()=6, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getForeignKeys_(ForeignKeysRequest, )=0} 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052624_4e83e3ca-e2d9-4f4a-9787-a361167d234a); Time taken: 0.223 seconds 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052624_4e83e3ca-e2d9-4f4a-9787-a361167d234a): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@druid_partitioned_table 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@druid_partitioned_table 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:26:24,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:26:24,870 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052624_4e83e3ca-e2d9-4f4a-9787-a361167d234a); Time taken: 0.0 seconds 2018-07-21T05:26:24,870 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:26:24,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:24,870 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:24,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:26:25,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<$f0:bigint,$f1:bigint> 2018-07-21T05:26:25,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties: table properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, 
location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532175984, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=6, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table} partition properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532175984, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=6, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table} 2018-07-21T05:26:25,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidQueryRecordReader: Retrieving data from druid using 
query: TimeseriesQuery{dataSource='default.druid_partitioned_table', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}} 2018-07-21T05:26:25,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] starting 2018-07-21T05:26:25,018 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,023 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,024 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,025 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,026 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,027 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,027 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,029 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,030 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,031 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,031 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,033 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,034 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,035 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,036 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,037 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,037 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:26:25,124 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 
2018 12:26:25 GMT Content-Type: application/x-jackson-smile X-Druid-Query-Id: 7d38bbd5-a6f8-4c5a-bdb0-6357dbb3af3a X-Druid-Response-Context: {} Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:26:25,124 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got response: 200 OK 2018-07-21T05:26:25,124 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@2fae9944 2018-07-21T05:26:25,124 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 6B, last=false 2018-07-21T05:26:25,125 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf 2018-07-21T05:26:25,125 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 0B, last=true 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: close called for operator TS[0] 2018-07-21T05:26:25,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing operator TS[0] 2018-07-21T05:26:25,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_TS_0:0, 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing child = SEL[1] 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: close called for operator SEL[1] 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: allInitializedParentsAreClosed? parent.state = CLOSE 2018-07-21T05:26:25,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing operator SEL[1] 2018-07-21T05:26:25,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_SEL_1:0, 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing child = LIST_SINK[3] 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: close called for operator LIST_SINK[3] 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: allInitializedParentsAreClosed? 
parent.state = CLOSE 2018-07-21T05:26:25,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Closing operator LIST_SINK[3] 2018-07-21T05:26:25,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_LIST_SINK_3:0, 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: 3 Close done 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 1 Close done 2018-07-21T05:26:25,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: 0 Close done 2018-07-21T05:26:25,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001 2018-07-21T05:26:25,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-24_645_5897888312578008068-1 2018-07-21T05:26:25,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-24_645_5897888312578008068-1 2018-07-21T05:26:25,138 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.226 seconds 2018-07-21T05:26:25,138 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:26:25,138 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:26:25,138 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:26:25,138 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:26:25,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 
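Note that the fetch above never launches a Tez or MR task: DruidQueryRecordReader hands the logged TimeseriesQuery straight to the Druid broker over HTTP (the NettyHttpClient POST to http://localhost:8082/druid/v2/). A minimal sketch of replaying that broker call outside Hive, assuming only a broker reachable at the same address and the third-party Python requests library; Hive's own client negotiates the binary application/x-jackson-smile encoding seen in the response headers, while this sketch asks for plain JSON to stay dependency-free:

    # Minimal sketch, not Hive's code path: replay the broker call logged above.
    import requests

    # Query body copied from the druid.query.json table property in this log.
    query = {
        "queryType": "timeseries",
        "dataSource": "default.druid_partitioned_table",
        "descending": False,
        "granularity": "all",
        "aggregations": [
            {"type": "longSum", "name": "$f0", "fieldName": "cint"},
            {"type": "longSum", "name": "$f1", "fieldName": "cbigint"},
        ],
        "intervals": ["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],
        "context": {"skipEmptyBuckets": True},
    }

    resp = requests.post(
        "http://localhost:8082/druid/v2/",
        json=query,
        headers={"Accept": "application/json"},  # Hive receives x-jackson-smile instead
    )
    resp.raise_for_status()
    # Timeseries results arrive as one row per time bucket; with granularity
    # "all" a single row carries the two sums Hive reads back as $f0 and $f1.
    for row in resp.json():
        print(row["timestamp"], row["result"])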
2018-07-21T05:26:25,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0
2018-07-21T05:26:25,140 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052625_5e0aede1-f699-4c44-a4f1-3cf5b7e2a834): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0
2018-07-21T05:26:25,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0
2018-07-21T05:26:25,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:26:25,141 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:25,141 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:26:25,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,141 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:26:25,141 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:25,141 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,141 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,151 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:25,151 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:25,154 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1
2018-07-21T05:26:25,154 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:26:25,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:25,157 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,157 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,158 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,158 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,159 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table_0
2018-07-21T05:26:25,159 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table_0
2018-07-21T05:26:25,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:26:25,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:25,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:25,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:25,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:25,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:25,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:26:25,183 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:26:25,183 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:26:25,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:25,184 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:25,184 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:25,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:25,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:25,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after top-level introduceDerivedTable
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:25,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:25,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:25,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:25,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(_c0=[$0], _c1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table_0]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:26:25,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:25,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:26:25,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,211 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:25,211 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:25,212 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for druid_partitioned_table_0 TS[0]
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB druid_partitioned_table_0{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB druid_partitioned_table_0{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (. (tok_table_or_col druid_partitioned_table_0) $f0) _c0) (tok_selexpr (. (tok_table_or_col druid_partitioned_table_0) $f1) _c1))
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = druid_partitioned_table_0{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:26:25,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:26:25,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_140_5107540076871386620-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001
2018-07-21T05:26:25,214 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_140_5107540076871386620-1
2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_140_5107540076871386620-1/-ext-10003
2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001 row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:25,216 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
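The plan dumps above show the whole query collapsing into a single DruidQuery node with aggs=[[sum($7), sum($8)]]; $7 and $8 are cint and cbigint in the table's column order, and each sum becomes one Druid longSum aggregator in the generated timeseries query. A tiny sketch of that equivalence on hypothetical rows (the real table reports numRows=9173):

    # Hypothetical (cint, cbigint) rows, for illustration only.
    rows = [(1, 100), (2, 200), (3, 300)]

    # The pushed-down aggregators compute exactly the HiveQL aggregates:
    #   {"type": "longSum", "name": "$f0", "fieldName": "cint"}    -> sum(cint)
    #   {"type": "longSum", "name": "$f1", "fieldName": "cbigint"} -> sum(cbigint)
    result = {
        "$f0": sum(cint for cint, _ in rows),
        "$f1": sum(cbigint for _, cbigint in rows),
    }
    print(result)  # shape of the single result row DruidSerDe maps back to two bigint columns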
2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-SEL[1]-FS[2] 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,216 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(2) 2018-07-21T05:26:25,216 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(1) 2018-07-21T05:26:25,216 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0) 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-SEL[1]-FS[2] 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 
2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-SEL[1]-LIST_SINK[3] 2018-07-21T05:26:25,218 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation 2018-07-21T05:26:25,218 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false) 2018-07-21T05:26:25,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start 2018-07-21T05:26:25,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition 2018-07-21T05:26:25,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,218 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:$f0, type:bigint, comment:null), FieldSchema(name:$f1, type:bigint, comment:null)], properties:null) 2018-07-21T05:26:25,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint] 2018-07-21T05:26:25,219 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing operator TS[0] 2018-07-21T05:26:25,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS 2018-07-21T05:26:25,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Operator 0 TS initialized 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing children of 0 TS 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing child 1 SEL 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing operator SEL[1] 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: SELECT struct<$f0:bigint,$f1:bigint> 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL 2018-07-21T05:26:25,220 
DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Operator 1 SEL initialized 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing children of 1 SEL 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing child 3 LIST_SINK 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing operator LIST_SINK[3] 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Operator 3 LIST_SINK initialized 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK done is reset. 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL done is reset. 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS done is reset. 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=19, flushCache_()=0, getAllDatabases_()=1, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=1, getForeignKeys_(ForeignKeysRequest, )=1} 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052625_5e0aede1-f699-4c44-a4f1-3cf5b7e2a834); Time taken: 0.081 seconds 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052625_5e0aede1-f699-4c44-a4f1-3cf5b7e2a834): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@druid_partitioned_table_0 2018-07-21T05:26:25,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
log.PerfLogger: 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@druid_partitioned_table_0 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052625_5e0aede1-f699-4c44-a4f1-3cf5b7e2a834); Time taken: 0.001 seconds 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,221 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:25,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table_0 2018-07-21T05:26:25,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<$f0:bigint,$f1:bigint> 2018-07-21T05:26:25,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties: table properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, 
location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table_0, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table_0, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table_0 { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532175819, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table_0","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=0, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table_0} partition properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table_0, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table_0, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table_0 { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532175819, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table_0","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=0, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table_0} 2018-07-21T05:26:25,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidQueryRecordReader: Retrieving 
data from druid using query: TimeseriesQuery{dataSource='default.druid_partitioned_table_0', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}}
2018-07-21T05:26:25,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] starting
2018-07-21T05:26:25,257 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: DefaultHttpResponse(chunked: true)
  HTTP/1.1 200 OK
  Date: Sat, 21 Jul 2018 12:26:25 GMT
  Content-Type: application/x-jackson-smile
  X-Druid-Query-Id: 3106d63c-376d-49cc-9eb2-41cdd2e74170
  X-Druid-Response-Context: {}
  Vary: Accept-Encoding, User-Agent
  Transfer-Encoding: chunked
  Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:25,257 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got response: 200 OK
2018-07-21T05:26:25,257 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@b685f46
2018-07-21T05:26:25,257 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 6B, last=false
2018-07-21T05:26:25,257 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf
2018-07-21T05:26:25,257 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 0B, last=true
2018-07-21T05:26:25,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: close called for operator TS[0]
2018-07-21T05:26:25,261 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing operator TS[0]
2018-07-21T05:26:25,261 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_TS_0:0,
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing child = SEL[1]
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: close called for operator SEL[1]
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: allInitializedParentsAreClosed? parent.state = CLOSE
2018-07-21T05:26:25,261 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing operator SEL[1]
2018-07-21T05:26:25,261 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_SEL_1:0,
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing child = LIST_SINK[3]
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: close called for operator LIST_SINK[3]
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: allInitializedParentsAreClosed? parent.state = CLOSE
2018-07-21T05:26:25,261 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Closing operator LIST_SINK[3]
2018-07-21T05:26:25,261 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_LIST_SINK_3:0,
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: 3 Close done
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 1 Close done
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: 0 Close done
2018-07-21T05:26:25,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001
2018-07-21T05:26:25,262 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1
2018-07-21T05:26:25,263 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_140_5107540076871386620-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_140_5107540076871386620-1
2018-07-21T05:26:25,264 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.082 seconds
2018-07-21T05:26:25,264 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:26:25,264 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:26:25,264 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:26:25,264 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
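For reference, the timeseries query the FetchOperator just ran can be replayed against the broker by hand. A minimal sketch (illustrative only, assuming the broker from this run is still listening on localhost:8082 as in the NettyHttpClient records above; the query JSON is copied verbatim from the druid.query.json table property, and Druid's native endpoint POST /druid/v2/ accepts plain JSON in addition to the smile encoding seen in the response headers):

import json
import urllib.request

query = {
    "queryType": "timeseries",
    "dataSource": "default.druid_partitioned_table_0",
    "descending": False,
    "granularity": "all",
    "aggregations": [
        {"type": "longSum", "name": "$f0", "fieldName": "cint"},
        {"type": "longSum", "name": "$f1", "fieldName": "cbigint"},
    ],
    "intervals": ["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],
    "context": {"skipEmptyBuckets": True},
}

req = urllib.request.Request(
    "http://localhost:8082/druid/v2/",
    data=json.dumps(query).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    # Expect a single bucket holding the two longSum aggregates, shaped like
    # [{"timestamp": "...", "result": {"$f0": ..., "$f1": ...}}]
    print(json.load(resp))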
2018-07-21T05:26:25,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:26:25,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
2018-07-21T05:26:25,266 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5): SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
2018-07-21T05:26:25,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
2018-07-21T05:26:25,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:26:25,270 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:25,270 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:26:25,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: floor_hour
2018-07-21T05:26:25,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:26:25,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:25,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,271 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,280 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:25,280 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:25,282 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1
2018-07-21T05:26:25,283 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:26:25,285 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,285 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,287 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,287 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,288 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,288 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,289 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,289 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,290 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:26:25,290 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:26:25,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: floor_hour
2018-07-21T05:26:25,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: floor_hour
2018-07-21T05:26:25,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [timestamp with local time zone('US/Pacific')] accepted = [timestamp with local time zone('US/Pacific')] method = public org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable org.apache.hadoop.hive.ql.udf.UDFDateFloor.evaluate(org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable)
2018-07-21T05:26:25,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:25,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
          HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
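The FunctionRegistry records above show floor_hour resolving to UDFDateFloor over timestamp with local time zone; semantically the UDF truncates a timestamp to the top of its hour, which is what FLOOR_HOUR(...) denotes in the trimmed plan. A standalone sketch of that flooring behaviour (plain Python for illustration, not Hive's UDFDateFloor; the sample timestamp is invented):

from datetime import datetime, timezone

def floor_hour(ts: datetime) -> datetime:
    # Truncate minutes, seconds and sub-seconds while keeping the zone:
    # the same flooring FLOOR_HOUR applies to __time in the plan above.
    return ts.replace(minute=0, second=0, microsecond=0)

print(floor_hour(datetime(2018, 7, 21, 5, 26, 25, tzinfo=timezone.utc)))
# -> 2018-07-21 05:00:00+00:00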
2018-07-21T05:26:25,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
  HiveSortLimit(offset=[0], fetch=[10])
    HiveProject(__time=[$0], cstring1=[$1], cstring2=[$2], cdouble=[$3], cfloat=[$4], ctinyint=[$5], csmallint=[$6], cint=[$7], cbigint=[$8], cboolean1=[$9], cboolean2=[$10])
      HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last])
        HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11], ctinyint1=[$0], csmallint1=[$1], cint1=[$2], cbigint1=[$3], cfloat1=[$4], cdouble1=[$5], cstring11=[$6], cstring21=[$7], ctimestamp1=[$8], ctimestamp2=[$9], cboolean11=[$10], cboolean21=[$11], block__offset__inside__file=[$12], input__file__name=[$13], row__id=[$14])
          HiveFilter(condition=[IS NOT NULL($8)])
            HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
  HiveSortLimit(offset=[0], fetch=[10])
    HiveProject(__time=[$0], cstring1=[$1], cstring2=[$2], cdouble=[$3], cfloat=[$4], ctinyint=[$5], csmallint=[$6], cint=[$7], cbigint=[$8], cboolean1=[$9], cboolean2=[$10])
      HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last])
        HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11], ctinyint1=[$0], csmallint1=[$1], cint1=[$2], cbigint1=[$3], cfloat1=[$4], cdouble1=[$5], cstring11=[$6], cstring21=[$7], ctimestamp1=[$8], ctimestamp2=[$9], cboolean11=[$10], cboolean21=[$11], block__offset__inside__file=[$12], input__file__name=[$13], row__id=[$14])
          HiveFilter(condition=[IS NOT NULL($8)])
            HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
  HiveSortLimit(offset=[0], fetch=[10])
    HiveProject(__time=[$0], cstring1=[$1], cstring2=[$2], cdouble=[$3], cfloat=[$4], ctinyint=[$5], csmallint=[$6], cint=[$7], cbigint=[$8], cboolean1=[$9], cboolean2=[$10])
      HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last])
        HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11], ctinyint1=[$0], csmallint1=[$1], cint1=[$2], cbigint1=[$3], cfloat1=[$4], cdouble1=[$5], cstring11=[$6], cstring21=[$7], ctimestamp1=[$8], ctimestamp2=[$9], cboolean11=[$10], cboolean21=[$11], block__offset__inside__file=[$12], input__file__name=[$13], row__id=[$14])
          HiveFilter(condition=[IS NOT NULL($8)])
            HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
          HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,337 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:26:25,337 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:26:25,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:25,339 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:25,339 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:25,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:25,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
  HiveSortLimit(sort0=[$0], sort1=[$2], sort2=[$1], dir0=[ASC-nulls-first], dir1=[DESC-nulls-last], dir2=[DESC-nulls-last], fetch=[10])
    HiveProject(__time=[FLOOR_HOUR(CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15), FLAG(HOUR))], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
      HiveFilter(condition=[IS NOT NULL($8)])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:25,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: floor_hour
2018-07-21T05:26:25,358 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:25,358 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,358 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:25,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:25,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:25,366 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:25,368 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1
2018-07-21T05:26:25,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:26:25,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for
null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:26:25,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:26:25,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function floor_hour (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1))) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. 
(tok_table_or_col alltypesorc) cboolean2) cboolean2)) 2018-07-21T05:26:25,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null 2018-07-21T05:26:25,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [timestamp with local time zone('US/Pacific')] accepted = [timestamp with local time zone('US/Pacific')] method = public org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable org.apache.hadoop.hive.ql.udf.UDFDateFloor.evaluate(org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable) 2018-07-21T05:26:25,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:26:25,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0 2018-07-21T05:26:25,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created LimitOperator Plan for clause: insclause-0 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:26:25,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001 2018-07-21T05:26:25,375 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1 2018-07-21T05:26:25,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : 
hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1/-ext-10003
2018-07-21T05:26:25,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:26:25,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:26:25,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:25,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:26:25,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-RS[3]-SEL[4]-LIM[5]-FS[6]
2018-07-21T05:26:25,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1
2018-07-21T05:26:25,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:26:25,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(6)
2018-07-21T05:26:25,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for LIM(5)
2018-07-21T05:26:25,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(4)
2018-07-21T05:26:25,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for RS(3)
2018-07-21T05:26:25,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:26:25,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:26:25,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:26:25,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:26:25,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:26:25,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[7]-SEL[2]-RS[3]-SEL[4]-LIM[5]-FS[6]
2018-07-21T05:26:25,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 3 key:[Column[_col0], Column[_col2], Column[_col1]]
2018-07-21T05:26:25,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 3 oldColExprMap: {VALUE._col2=Column[_col5], VALUE._col3=Column[_col6], VALUE._col4=Column[_col7], VALUE._col5=Column[_col8], VALUE._col0=Column[_col3], VALUE._col1=Column[_col4], KEY.reducesinkkey0=Column[_col0], KEY.reducesinkkey1=Column[_col2], KEY.reducesinkkey2=Column[_col1], VALUE._col6=Column[_col9], VALUE._col7=Column[_col10]}
2018-07-21T05:26:25,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 3 newColExprMap: {VALUE._col2=Column[_col5], VALUE._col3=Column[_col6], VALUE._col4=Column[_col7], VALUE._col5=Column[_col8], VALUE._col0=Column[_col3], VALUE._col1=Column[_col4], KEY.reducesinkkey0=Column[_col0], KEY.reducesinkkey1=Column[_col2], KEY.reducesinkkey2=Column[_col1], VALUE._col6=Column[_col9], VALUE._col7=Column[_col10]}
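The RS 3 key maps above carry the compiled ORDER BY: reducesinkkey0 is __time (ASC, nulls first, Hive's default, matching dir0=[ASC-nulls-first] in the plans earlier), while reducesinkkey1 (cstring2) and reducesinkkey2 (cstring1) are DESC NULLS LAST. A throwaway sketch of those comparator semantics (plain Python, not the ReduceSink implementation; the sample rows are invented):

from functools import cmp_to_key

def asc_nulls_first(a, b):
    if a == b: return 0
    if a is None: return -1
    if b is None: return 1
    return -1 if a < b else 1

def desc_nulls_last(a, b):
    if a == b: return 0
    if a is None: return 1
    if b is None: return -1
    return -1 if a > b else 1

# rows are (__time, cstring1, cstring2); the key order mirrors reducesinkkey0..2
def row_cmp(r1, r2):
    return (asc_nulls_first(r1[0], r2[0])
            or desc_nulls_last(r1[2], r2[2])
            or desc_nulls_last(r1[1], r2[1]))

rows = [("05:00", "a", None), ("05:00", None, "zz"), (None, "b", "b")]
print(sorted(rows, key=cmp_to_key(row_cmp))[:10])  # [:10] stands in for LIMIT 10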
2018-07-21T05:26:25,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[7]-SEL[2]-RS[3]-SEL[4]-LIM[5]-FS[6]
2018-07-21T05:26:25,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:26:25,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:26:25,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[7] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: 
boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[7] 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[7] conditions:ctimestamp1 is not null 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[7] conditions:ctimestamp1 is not null 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_7 {} 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFBridge ==> floor_hour (GenericUDFToTimestampLocalTZ(Column[ctimestamp1])) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(KEY.reducesinkkey0: timestamp with local time zone|{null}__time,KEY.reducesinkkey2: string|{null}cstring1,KEY.reducesinkkey1: string|{null}cstring2,VALUE._col0: double|{null}cdouble,VALUE._col1: float|{null}cfloat,VALUE._col2: tinyint|{null}ctinyint,VALUE._col3: smallint|{null}csmallint,VALUE._col4: int|{null}cint,VALUE._col5: bigint|{null}cbigint,VALUE._col6: boolean|{null}cboolean1,VALUE._col7: boolean|{null}cboolean2) 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3] 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[4] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {} 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[4] 2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
optimizer.ConstantPropagateProcFactory: New column list:(Column[KEY.reducesinkkey0] Column[KEY.reducesinkkey2] Column[KEY.reducesinkkey1] Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7])
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:LIM[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_4 {}
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator LIM[5]
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: timestamp with local time zone|{},_col1: string|{},_col2: string|{},_col3: double|{},_col4: float|{},_col5: tinyint|{},_col6: smallint|{},_col7: int|{},_col8: bigint|{},_col9: boolean|{},_col10: boolean|{})
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op LIM_5 {}
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6]
2018-07-21T05:26:25,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:26:25,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:26:25,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:26:25,389 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.621791ms + 0.015921ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ?
and "COLUMN_NAME" in (...)] 2018-07-21T05:26:25,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:25,392 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:25,392 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0] 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint 
colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[7] 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2] 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 
avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[3] 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, KEY.reducesinkkey0= colName: KEY.reducesinkkey0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, KEY.reducesinkkey1= colName: KEY.reducesinkkey1 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, KEY.reducesinkkey2= colName: KEY.reducesinkkey2 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[4] 2018-07-21T05:26:25,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
annotation.StatsRulesProcFactory: [0] STATS-SEL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.LimitOperator: Setting stats (Num rows: 10 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE) on LIM[5] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-LIM[5]: numRows: 10 dataSize: 2240 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 1 numNulls: 3 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 1 numNulls: 3 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 5 numNulls: 3 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5 numNulls: 3 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 1 numNulls: 3 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: 
_col3 colType: double countDistincts: 5 numNulls: 3 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5 numNulls: 3 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 1 numNulls: 3 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5 numNulls: 3 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 5 numNulls: 3 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[6]: numRows: 10 dataSize: 2240 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 1 numNulls: 6 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 1 numNulls: 6 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 5 numNulls: 6 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5 numNulls: 6 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 1 numNulls: 6 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5 numNulls: 6 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5 numNulls: 6 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 1 numNulls: 6 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5 numNulls: 6 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 5 numNulls: 6 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[7] 2018-07-21T05:26:25,395 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[KEY.reducesinkkey0, KEY.reducesinkkey1, KEY.reducesinkkey2]]; sort column names: [[KEY.reducesinkkey0, KEY.reducesinkkey1, KEY.reducesinkkey2]]; bucket count: -1; bucketing version: 2 }) on RS[3] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: [[_col0, _col2, _col1]]; sort column names: [[_col0, _col2, _col1]]; bucket count: -1; bucketing version: 2 }) on SEL[4] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.LimitOperator: Setting traits ({ bucket column names: [[_col0, _col2, _col1]]; sort column names: [[_col0, _col2, _col1]]; bucket count: -1; bucketing version: 2 }) on LIM[5] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: [[_col0, _col2, _col1]]; sort column names: [[_col0, _col2, _col1]]; bucket count: -1; bucketing version: 2 }) on FS[6] 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Number of reducers determined to be: 1 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 6 2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: LIM, 5
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 4
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 3
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 7
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:26:25,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[7] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[7]
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[7] conditions:ctimestamp1 is not null
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[7] conditions:ctimestamp1 is not null
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_7 {}
2018-07-21T05:26:25,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFBridge ==> floor_hour (GenericUDFToTimestampLocalTZ(Column[ctimestamp1])) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(KEY.reducesinkkey0: timestamp with local time zone|{null}__time,KEY.reducesinkkey2: string|{null}cstring1,KEY.reducesinkkey1: string|{null}cstring2,VALUE._col0: double|{null}cdouble,VALUE._col1: float|{null}cfloat,VALUE._col2: tinyint|{null}ctinyint,VALUE._col3: smallint|{null}csmallint,VALUE._col4: int|{null}cint,VALUE._col5: bigint|{null}cbigint,VALUE._col6: boolean|{null}cboolean1,VALUE._col7: boolean|{null}cboolean2)
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[3]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[4] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_3 {}
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[4]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[KEY.reducesinkkey0] Column[KEY.reducesinkkey2] Column[KEY.reducesinkkey1] Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7])
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:LIM[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_4 {}
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator LIM[5]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: timestamp with local time zone|{},_col1: string|{},_col2: string|{},_col3: double|{},_col4: float|{},_col5: tinyint|{},_col6: smallint|{},_col7: int|{},_col8: bigint|{},_col9: boolean|{},_col10: boolean|{})
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op LIM_5 {}
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[6]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[3]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:26:25,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[3]
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[4]
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[6]
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[4]
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[3] with following reduce work: Reducer 2
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[3] as parent from SEL[4]
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[6]
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:26:25,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:26:25,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:26:25,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
2018-07-21T05:26:25,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:26:25,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,399 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:26:25,399 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:26:25,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:26:25,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:26:25,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,399 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:26:25,400 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing operator LIST_SINK[8]
2018-07-21T05:26:25,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 8 LIST_SINK
2018-07-21T05:26:25,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Operator 8 LIST_SINK initialized
2018-07-21T05:26:25,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 8 LIST_SINK done is reset.
2018-07-21T05:26:25,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,400 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:26:25,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=2, isCompatibleWith_(Configuration, )=1, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=16, flushCache_()=0, getAllDatabases_()=2, getUniqueConstraints_(UniqueConstraintsRequest, )=0, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=10, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:26:25,400 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5); Time taken: 0.134 seconds
2018-07-21T05:26:25,400 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:26:25,400 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:25,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,400 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5): SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
2018-07-21T05:26:25,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001
2018-07-21T05:26:25,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1
2018-07-21T05:26:25,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1
2018-07-21T05:26:25,401 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:MAPRED] in serial mode
2018-07-21T05:26:25,413 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user
org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found. id: hive_test_user: no such user id: hive_test_user: no such user
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?]
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?]
    at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:26:25,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1
2018-07-21T05:26:25,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:26:25,436 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:26:25,436 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,436 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5
2018-07-21T05:26:25,436 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: No local resources to process (other than hive-exec)
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,436 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: SELECT floor_hour(cast(`ctimestamp1` as...10 (Stage-1)
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\nSELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10 "}
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:26:25,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,438 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:26:25,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,440 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 2.78KB
2018-07-21T05:26:25,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001
2018-07-21T05:26:25,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1/-ext-10002
2018-07-21T05:26:25,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,454 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1
2018-07-21T05:26:25,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:26:25,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:26:25,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,458 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 4.92KB
2018-07-21T05:26:25,468 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:26:25,468 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,472 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=SELECT floor_hour(cast(`ctimestamp1` as...10 (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5 }
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:26:25,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:25,507 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741897_1073, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_4.recovery
2018-07-21T05:26:25,546 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_4.recovery for DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:26:25,552 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_4, dagName=SELECT floor_hour(cast(`ctimestamp1` as...10 (Stage-1)
2018-07-21T05:26:25,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:25,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:26,106 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:26,106 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001)
2018-07-21T05:26:26,107 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:26:27,714 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:26:27,726 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0001
2018-07-21T05:26:27,727 DEBUG [ContainersLauncher #2] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #2, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:26:29,127 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:26:31,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:31,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:26:34,664 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:26:35,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,167 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:26:35,308 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:35,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:26:35,309 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741898_1074, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1/_task_tmp.-ext-10002/_tmp.000000_0 2018-07-21T05:26:35,358 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1/_task_tmp.-ext-10002/_tmp.000000_0 is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1711580000_30 2018-07-21T05:26:35,391 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_4.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1 2018-07-21T05:26:35,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,395 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1 2018-07-21T05:26:35,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: TaskId for 000000_0 = 000000 2018-07-21T05:26:35,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,408 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10 2018-07-21T05:26:35,408 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:26:35,408 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc 2018-07-21T05:26:35,408 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: 
2018-07-21T05:26:35,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {}
2018-07-21T05:26:35,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052625_f24c78b2-dd53-434e-a5da-e387361ed4d5); Time taken: 10.015 seconds
2018-07-21T05:26:35,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:35,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
2018-07-21T05:26:35,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: FetchOperator get writeIdStr: null
2018-07-21T05:26:35,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] mapred.FileInputFormat: Time taken to get FileStatuses: 3
2018-07-21T05:26:35,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] mapred.FileInputFormat: Total input files to process : 1
2018-07-21T05:26:35,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] mapred.FileInputFormat: Total # of splits generated by getSplits: 1, TimeTaken: 3
2018-07-21T05:26:35,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<_col0:timestamp with local time zone,_col1:string,_col2:string,_col3:double,_col4:float,_col5:tinyint,_col6:smallint,_col7:int,_col8:bigint,_col9:boolean,_col10:boolean>
2018-07-21T05:26:35,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties: table properties: {columns=_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10, serialization.escape.crlf=true, serialization.lib=org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, hive.serialization.extend.additional.nesting.levels=true, serialization.format=1, columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, escape.delim=\} partition properties: {columns=_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10, serialization.escape.crlf=true, serialization.lib=org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, hive.serialization.extend.additional.nesting.levels=true, serialization.format=1, columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, escape.delim=\}
2018-07-21T05:26:35,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: close called for operator LIST_SINK[8]
2018-07-21T05:26:35,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Closing operator LIST_SINK[8]
2018-07-21T05:26:35,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_LIST_SINK_8:10,
2018-07-21T05:26:35,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: 8 Close done
2018-07-21T05:26:35,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001
2018-07-21T05:26:35,444 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1
2018-07-21T05:26:35,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-25_266_2778072552632982109-1/-mr-10001/.hive-staging_hive_2018-07-21_05-26-25_266_2778072552632982109-1
2018-07-21T05:26:35,446 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 10.149 seconds, Fetched: 10 row(s)
2018-07-21T05:26:35,446 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:26:35,446 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:26:35,446 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:26:35,446 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:26:35,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
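The deserializer properties above carry the fetch-side schema as two flat strings: comma-separated `columns` and colon-separated `columns.types`. Splitting the types string naively on every colon would break for nested types such as struct<_col0:int>, whose internal colons must be ignored. A small illustrative splitter (not Hive's TypeInfoUtils) that only splits at top level:

import java.util.ArrayList;
import java.util.List;

// Sketch: split a Hive "columns.types" property on top-level colons only,
// so that struct<...> and parenthesized types keep their internal colons.
public class ColumnTypesSplitter {
    static List<String> splitTopLevel(String types) {
        List<String> out = new ArrayList<>();
        int depth = 0, start = 0;
        for (int i = 0; i < types.length(); i++) {
            char c = types.charAt(i);
            if (c == '<' || c == '(') depth++;
            else if (c == '>' || c == ')') depth--;
            else if (c == ':' && depth == 0) {
                out.add(types.substring(start, i));
                start = i + 1;
            }
        }
        out.add(types.substring(start));
        return out;
    }

    public static void main(String[] args) {
        String types = "timestamp with local time zone:string:string:double:float:"
                + "tinyint:smallint:int:bigint:boolean:boolean";
        String[] names = ("_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,"
                + "_col9,_col10").split(",");
        List<String> parsed = splitTopLevel(types);
        for (int i = 0; i < names.length; i++) {
            System.out.println(names[i] + " -> " + parsed.get(i));
        }
    }
}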
2018-07-21T05:26:35,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:26:35,447 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052635_70923f90-1bba-4907-82bd-48ac150660e8): EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:26:35,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:26:35,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:26:35,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,450 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:35,450 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:26:35,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:26:35,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:35,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,459 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,460 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:35,460 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:35,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,460 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,460 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,468 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:26:35,469 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,470 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,471 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,471 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,472 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,472 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,473 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,473 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,474 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:26:35,474 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:26:35,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:35,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp2=[$9], cboolean1=[$10], cboolean2=[$11])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
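The two plans above differ in the ordinal used for ctimestamp2: before trimming it is $9 against the full 12-column alltypesorc schema; after trimming, an inner HiveProject keeps only the 11 referenced columns and the same column becomes $8. A toy remap illustrating that shift (this is not Calcite's RelFieldTrimmer, just index arithmetic under the same assumption):

import java.util.ArrayList;
import java.util.List;

// Toy field trimming: keep only referenced columns and remap ordinals.
public class FieldTrimSketch {
    public static void main(String[] args) {
        List<String> full = List.of("ctinyint", "csmallint", "cint", "cbigint",
                "cfloat", "cdouble", "cstring1", "cstring2",
                "ctimestamp1", "ctimestamp2", "cboolean1", "cboolean2");
        // Ordinals the query actually references (everything but ctimestamp1).
        List<Integer> used = List.of(0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11);

        List<String> trimmed = new ArrayList<>();
        for (int ordinal : used) trimmed.add(full.get(ordinal));

        System.out.println("before trim: ctimestamp2 = $" + full.indexOf("ctimestamp2"));
        System.out.println("after trim:  ctimestamp2 = $" + trimmed.indexOf("ctimestamp2"));
        // Prints $9 before and $8 after, matching the two plan dumps above.
    }
}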
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,493 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,493 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
  HiveFilter(condition=[IS NOT NULL($8)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp2=[$9], cboolean1=[$10], cboolean2=[$11])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,496 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:26:35,496 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:26:35,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:35,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,497 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:35,497 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:26:35,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
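The filterListCmdObjects lines above show authorization trimming metadata listings before they are returned: the database listing survives as [DATABASE, default], while the materialized-view lookup yields []. A toy model of that filtering, with a hypothetical ACL map rather than SQLStdHiveAuthorizationValidator's real policy logic:

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Toy filterListCmdObjects: drop objects the user may not see.
public class FilterListSketch {
    record HivePrivilegeObjectToy(String type, String name) {}

    static List<HivePrivilegeObjectToy> filterListCmdObjects(
            List<HivePrivilegeObjectToy> objs, String user,
            Map<String, Set<String>> visibleByUser) {
        Set<String> visible = visibleByUser.getOrDefault(user, Set.of());
        return objs.stream()
                .filter(o -> visible.contains(o.name()))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        var dbs = List.of(new HivePrivilegeObjectToy("DATABASE", "default"));
        var acl = Map.of("hive_test_user", Set.of("default"));
        // First call in the log: the database listing survives the filter.
        System.out.println(filterListCmdObjects(dbs, "hive_test_user", acl));
        // Second call: no materialized views exist, so the result is [].
        System.out.println(filterListCmdObjects(List.of(), "hive_test_user", acl));
    }
}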
2018-07-21T05:26:35,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,519 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:35,519 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,519 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,519 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,527 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:26:35,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:26:35,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:26:35,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:26:35,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:26:35,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp2)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2))
2018-07-21T05:26:35,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:26:35,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:26:35,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:26:35,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:35,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
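The "Created Select Plan row schema" record above shows how each select expression's alias is bound to a positional internal name (__time becomes _col0, cstring1 becomes _col1, and so on). A minimal sketch of that renaming, purely illustrative and not Hive's RowResolver:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Sketch: bind select aliases to internal _colN names in output order.
public class InternalNameSketch {
    static Map<String, String> internalNames(List<String> aliases) {
        Map<String, String> out = new LinkedHashMap<>();
        for (int i = 0; i < aliases.size(); i++) {
            out.put(aliases.get(i), "_col" + i);
        }
        return out;
    }

    public static void main(String[] args) {
        List<String> aliases = List.of("__time", "cstring1", "cstring2", "cdouble",
                "cfloat", "ctinyint", "csmallint", "cint", "cbigint",
                "cboolean1", "cboolean2");
        // Prints __time -> _col0 ... cboolean2 -> _col10, matching the
        // row schema in the planner output above.
        internalNames(aliases).forEach((a, c) -> System.out.println(a + " -> " + c));
    }
}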
2018-07-21T05:26:35,540 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_check_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,540 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_check_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getCheckConstraints: directsql : SELECT "DBS"."NAME", "TBLS"."TBL_NAME",CASE WHEN "COLUMNS_V2"."COLUMN_NAME" IS NOT NULL THEN "COLUMNS_V2"."COLUMN_NAME" ELSE "PARTITION_KEYS"."PKEY_NAME" END, "KEY_CONSTRAINTS"."CONSTRAINT_NAME", "KEY_CONSTRAINTS"."ENABLE_VALIDATE_RELY", "KEY_CONSTRAINTS"."DEFAULT_VALUE" from "TBLS" INNER JOIN "KEY_CONSTRAINTS" ON "TBLS"."TBL_ID" = "KEY_CONSTRAINTS"."PARENT_TBL_ID" INNER JOIN "DBS" ON "TBLS"."DB_ID" = "DBS"."DB_ID" LEFT OUTER JOIN "COLUMNS_V2" ON "COLUMNS_V2"."CD_ID" = "KEY_CONSTRAINTS"."PARENT_CD_ID" AND "COLUMNS_V2"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" LEFT OUTER JOIN "PARTITION_KEYS" ON "TBLS"."TBL_ID" = "PARTITION_KEYS"."TBL_ID" AND "PARTITION_KEYS"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" WHERE "KEY_CONSTRAINTS"."CONSTRAINT_TYPE" = 5 AND "DBS"."CTLG_NAME" = ? AND "DBS"."NAME" = ? AND "TBLS"."TBL_NAME" = ?
2018-07-21T05:26:35,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:35,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:35,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_448_816679318867150486-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table
2018-07-21T05:26:35,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_448_816679318867150486-1/-ext-10000
2018-07-21T05:26:35,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:26:35,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:26:35,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:35,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:26:35,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,559 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:26:35,559 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:26:35,559 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp2 is not null
2018-07-21T05:26:35,560 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp2 is not null
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,560 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
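The ppd.OpProcFactory lines above walk the operator chain from the file sink back toward the table scan (FS, SEL, FIL, TS), collect the pushable predicate, and re-attach it directly above the scan; the pushed filter gets a fresh operator id, which is why FIL[1] becomes FIL[4] in "After PPD". A toy sketch of that traversal and rewrite over a linear chain (not Hive's actual OpProcFactory):

import java.util.ArrayList;
import java.util.List;

// Toy predicate pushdown over a linear operator chain.
public class PushdownSketch {
    public static void main(String[] args) {
        // Plan before optimization, sink last: TS[0]-FIL[1]-SEL[2]-FS[3]
        List<String> ops = new ArrayList<>(List.of("TS[0]", "FIL[1]", "SEL[2]", "FS[3]"));
        String predicate = "ctimestamp2 is not null"; // references scan columns only

        // Visit sink -> source, as the "Processing for FS/SEL/FIL/TS" lines show.
        for (int i = ops.size() - 1; i >= 0; i--) {
            System.out.println("Processing for " + ops.get(i));
        }

        // Push: drop the original filter, re-add one right above the scan.
        // Here the filter was already adjacent to TS, so only the id changes.
        ops.remove("FIL[1]");
        ops.add(1, "FIL[4]");
        System.out.println("Pushdown predicates for alias alltypesorc: " + predicate);
        System.out.println("After PPD: " + String.join("-", ops));
    }
}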
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [] accepted = [] method = public org.apache.hadoop.hive.serde2.io.DoubleWritable org.apache.hadoop.hive.ql.udf.UDFRand.evaluate()
2018-07-21T05:26:35,560 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity], Column[__druid_extra_partition_key]]
2018-07-21T05:26:35,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:26:35,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp2 is not null (type: boolean)
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
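The SEL_5 operator that the granularity optimizer inserted above appends two synthetic sort/partition columns: __time_granularity (the row's timestamp floored to the segment granularity, floor_hour here) and __druid_extra_partition_key, which the plan computes as floor(1.0 / rand()) % 6. A toy version of that arithmetic, with illustrative names (only the shard expression is taken from the plan):

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Random;

// Toy versions of the two synthetic columns appended by SEL_5.
public class DruidPartitionKeySketch {
    static final Random RAND = new Random();

    static ZonedDateTime timeGranularity(ZonedDateTime ts) {
        // floor_hour: truncate to the start of the hour.
        return ts.truncatedTo(ChronoUnit.HOURS);
    }

    static long extraPartitionKey(int shards) {
        // floor(1.0 / rand()) % shards, as in the optimizer's expression;
        // rand() in (0,1) makes 1.0/rand() >= 1, so the result is 0..shards-1.
        return (long) Math.floor(1.0 / RAND.nextDouble()) % shards;
    }

    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.now(ZoneId.of("US/Pacific"));
        System.out.println("__time_granularity          = " + timeGranularity(ts));
        System.out.println("__druid_extra_partition_key = " + extraPartitionKey(6));
    }
}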
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp2]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
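The last record above is the constant folder declining to evaluate rand() at compile time because the function is flagged nondeterministic ("undeterministic" in Hive's wording); a deterministic function over constant inputs would be folded into a literal instead. A toy version of that guard, with hypothetical names:

import java.util.Map;
import java.util.function.Supplier;

// Toy constant folder: evaluate at compile time only if deterministic.
public class ConstantFoldSketch {
    record Udf(Supplier<Double> eval, boolean deterministic) {}

    static String fold(String name, Udf udf) {
        if (!udf.deterministic()) {
            return name + "()"; // leave the call in the plan for runtime
        }
        return String.valueOf(udf.eval().get()); // fold to a literal
    }

    public static void main(String[] args) {
        Map<String, Udf> registry = Map.of(
                "pi", new Udf(() -> Math.PI, true),
                "rand", new Udf(Math::random, false));
        // Prints pi -> 3.14159... (folded) and rand -> rand() (kept).
        registry.forEach((name, udf) ->
                System.out.println(name + " -> " + fold(name, udf)));
    }
}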
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:26:35,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:26:35,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,563 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:26:35,563 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:26:35,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.362848ms + 0.013434ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:26:35,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:26:35,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
2018-07-21T05:26:35,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:26:35,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:26:35,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
2018-07-21T05:26:35,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated:
false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= 
colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: 
tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3] 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0 2018-07-21T05:26:35,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 
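The Component / Operator records just above are TezCompiler enumerating the connected components of the operator graph (FS[3], SEL[7], RS[6], SEL[5], SEL[2], FIL[4], TS[0]) before declaring the plan "Cycle free: true". A minimal sketch of that kind of check, a plain DFS with a back-edge test over a hypothetical OperatorNode type rather than Hive's actual Operator classes:

    import java.util.*;

    // Hypothetical operator node; Hive's real operator classes carry far more state.
    final class OperatorNode {
        final String name;                              // e.g. "TS[0]", "RS[6]"
        final List<OperatorNode> children = new ArrayList<>();
        OperatorNode(String name) { this.name = name; }
    }

    final class CycleCheck {
        // True when no operator can reach itself again, i.e. the plan is a DAG.
        static boolean isCycleFree(OperatorNode root) {
            return dfs(root, new HashSet<>(), new HashSet<>());
        }

        private static boolean dfs(OperatorNode n, Set<OperatorNode> done,
                                   Set<OperatorNode> onPath) {
            if (onPath.contains(n)) return false;       // back edge: found a cycle
            if (done.contains(n)) return true;          // already explored, no cycle below
            onPath.add(n);
            for (OperatorNode c : n.children) {
                if (!dfs(c, done, onPath)) return false;
            }
            onPath.remove(n);
            done.add(n);
            return true;
        }

        public static void main(String[] args) {
            // The chain from this log: TS[0] -> FIL[4] -> SEL[2] -> SEL[5] -> RS[6] -> SEL[7] -> FS[3]
            String[] names = { "TS[0]", "FIL[4]", "SEL[2]", "SEL[5]", "RS[6]", "SEL[7]", "FS[3]" };
            OperatorNode root = new OperatorNode(names[0]);
            OperatorNode prev = root;
            for (int i = 1; i < names.length; i++) {
                OperatorNode next = new OperatorNode(names[i]);
                prev.children.add(next);
                prev = next;
            }
            System.out.println("Cycle free: " + isCycleFree(root));   // Cycle free: true
        }
    }

Wiring the seven operators from this log into a single chain and running the check returns true, matching the "Cycle free: true" line above.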
2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id) 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp2 is not null 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp2 is not null 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp2]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: 
tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:26:35,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately. 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6)) 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}) 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] 
to operator SEL[7] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key]) 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3] 2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events. 
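The GenTezWork records above break the single operator chain at the reduce sink: TS[0] through RS[6] become the vertex "Map 1", SEL[7] through FS[3] become "Reducer 2", and RS[6] is removed as SEL[7]'s parent so a shuffle edge replaces the direct link. A sketch of that splitting step, assuming a simplified linear chain and hypothetical Vertex/TezWorkSplitter types (Hive's real GenTezWork handles general DAGs, multiple reduce sinks, and union/merge cases):

    import java.util.*;

    // Hypothetical vertex; Hive's real MapWork/ReduceWork carry much more state.
    final class Vertex {
        final String name;
        final List<String> operators = new ArrayList<>();
        Vertex(String name) { this.name = name; }
    }

    final class TezWorkSplitter {
        // Cut a linear operator chain at each ReduceSink; every segment becomes
        // a vertex, and each cut implies a shuffle edge between the two vertices.
        static List<Vertex> split(List<String> chain) {
            List<Vertex> vertices = new ArrayList<>();
            int reducerId = 2;                           // "Map 1" first, reducers follow
            Vertex current = new Vertex("Map 1");
            for (String op : chain) {
                current.operators.add(op);
                if (op.startsWith("RS")) {               // vertex boundary at the reduce sink;
                    vertices.add(current);               // Hive also unlinks RS from its child here
                    current = new Vertex("Reducer " + reducerId++);
                }
            }
            vertices.add(current);
            return vertices;
        }

        public static void main(String[] args) {
            List<String> chain =
                List.of("TS[0]", "FIL[4]", "SEL[2]", "SEL[5]", "RS[6]", "SEL[7]", "FS[3]");
            for (Vertex v : split(chain)) {
                System.out.println(v.name + " <= " + v.operators);
            }
            // Map 1 <= [TS[0], FIL[4], SEL[2], SEL[5], RS[6]]
            // Reducer 2 <= [SEL[7], FS[3]]
        }
    }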
2018-07-21T05:26:35,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization 2018-07-21T05:26:35,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled. 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger 2018-07-21T05:26:35,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none 2018-07-21T05:26:35,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled. 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp2 is not null 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-26-35_448_816679318867150486 2018-07-21T05:26:35,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. 
executionId=hive_2018-07-21_05-26-35_448_816679318867150486 2018-07-21T05:26:35,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation 2018-07-21T05:26:35,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start 2018-07-21T05:26:35,584 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false) 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:Explain, type:string, comment:null)], properties:null) 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=2, isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=34, flushCache_()=0, getAllDatabases_()=1, getCheckConstraints_(CheckConstraintsRequest, )=13, getUniqueConstraints_(UniqueConstraintsRequest, )=0, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=10, getForeignKeys_(ForeignKeysRequest, )=0} 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052635_70923f90-1bba-4907-82bd-48ac150660e8); Time taken: 0.138 seconds 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052635_70923f90-1bba-4907-82bd-48ac150660e8): EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,585 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,586 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,586 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
ql.Driver: Starting task [Stage-5:EXPLAIN] in serial mode 2018-07-21T05:26:35,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,604 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,604 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:26:35,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7 2018-07-21T05:26:35,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:26:35,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:26:35,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052635_70923f90-1bba-4907-82bd-48ac150660e8); Time taken: 0.024 seconds 2018-07-21T05:26:35,609 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:26:35,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,610 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:35,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query EXPLAIN INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result file: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-35_448_816679318867150486-1/-local-10001 2018-07-21T05:26:35,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_448_816679318867150486-1 2018-07-21T05:26:35,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: 
file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-35_448_816679318867150486-1 2018-07-21T05:26:35,612 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.163 seconds, Fetched: 69 row(s) 2018-07-21T05:26:35,612 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:26:35,612 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:26:35,612 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:26:35,612 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,614 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd): INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:26:35,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:26:35,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,616 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:26:35,616 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:26:35,616 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:26:35,616 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 
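Most of the bare log.PerfLogger / metrics.PerfLogger records in this dump appear to have lost their message bodies (Hive's PERFLOG begin/end markers use angle brackets, which seem to have been stripped during extraction); in the intact log they bracket phases such as parse, semanticAnalyze, and compile and feed the "Time taken" totals the Driver reports. A minimal stand-in for that begin/end timing pattern, using a hypothetical SimplePerfLogger rather than Hive's own PerfLogger API:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Hypothetical sketch of begin/end phase timing, not Hive's PerfLogger class.
    final class SimplePerfLogger {
        private final Map<String, Long> starts = new LinkedHashMap<>();

        void begin(String method) {
            starts.put(method, System.nanoTime());
        }

        long end(String method) {
            long elapsedMs = (System.nanoTime() - starts.remove(method)) / 1_000_000L;
            System.out.printf("PERFLOG method=%s took=%dms%n", method, elapsedMs);
            return elapsedMs;
        }

        public static void main(String[] args) throws InterruptedException {
            SimplePerfLogger log = new SimplePerfLogger();
            log.begin("compile");
            log.begin("parse");
            Thread.sleep(1);          // stand-in for ParseDriver.parse(...)
            log.end("parse");
            log.begin("semanticAnalyze");
            Thread.sleep(2);          // stand-in for CalcitePlanner analysis
            log.end("semanticAnalyze");
            log.end("compile");       // Driver then logs "Completed compiling ... Time taken"
        }
    }

Nested phases simply nest begin/end pairs, which is why the compile total above (0.138 seconds for the EXPLAIN) exceeds the sum of any single sub-phase.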
2018-07-21T05:26:35,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,616 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,616 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,626 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:26:35,626 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:26:35,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,626 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,626 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:26:35,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,635 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,635 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,636 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,636 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : 
tbl=hive.default.alltypesorc 2018-07-21T05:26:35,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,638 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,638 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,638 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,638 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,639 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:26:35,639 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:26:35,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:26:35,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming 
unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp2=[$9], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:26:35,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery: HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($9)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:26:35,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery: HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($9)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation: HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($9)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields 
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp2=[$9], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:26:35,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,662 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive# 2018-07-21T05:26:35,662 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive# 2018-07-21T05:26:35,663 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null] 2018-07-21T05:26:35,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,664 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default 2018-07-21T05:26:35,664 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default 2018-07-21T05:26:35,665 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. 
Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:26:35,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:26:35,681 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:26:35,681 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:26:35,681 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main]
HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:26:35,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:26:35,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:26:35,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,689 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0] 2018-07-21T05:26:35,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:26:35,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 
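The row-schema dumps above, and the "Created Select Plan row schema" entry below, show how the planner tracks each operator's output schema: a filter only drops rows, so it passes its input schema through unchanged, while a select re-maps expressions to positional internal names (_col0, _col1, ...). The following toy sketch models that bookkeeping; it is illustrative only and is not Hive's RowResolver API (all class and variable names here are invented):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RowSchemaDemo {
        public static void main(String[] args) {
            // Schema produced by the table scan (column name -> type).
            Map<String, String> scan = new LinkedHashMap<>();
            scan.put("ctimestamp2", "timestamp");
            scan.put("cstring1", "string");

            // A filter (e.g. "ctimestamp2 IS NOT NULL") only removes rows,
            // so its output schema equals its input schema.
            Map<String, String> filter = new LinkedHashMap<>(scan);

            // A select re-maps expressions to positional internal names,
            // which is what "Created Select Plan row schema" shows.
            Map<String, String> select = new LinkedHashMap<>();
            int i = 0;
            for (Map.Entry<String, String> e : filter.entrySet()) {
                select.put("_col" + i++, e.getValue());
            }
            System.out.println(select); // {_col0=timestamp, _col1=string}
        }
    }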
2018-07-21T05:26:35,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp2)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2)) 2018-07-21T05:26:35,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null 2018-07-21T05:26:35,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:26:35,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0 2018-07-21T05:26:35,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:26:35,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: 
get_check_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_check_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:26:35,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getCheckConstraints: directsql : SELECT "DBS"."NAME", "TBLS"."TBL_NAME",CASE WHEN "COLUMNS_V2"."COLUMN_NAME" IS NOT NULL THEN "COLUMNS_V2"."COLUMN_NAME" ELSE "PARTITION_KEYS"."PKEY_NAME" END, "KEY_CONSTRAINTS"."CONSTRAINT_NAME", "KEY_CONSTRAINTS"."ENABLE_VALIDATE_RELY", "KEY_CONSTRAINTS"."DEFAULT_VALUE" from "TBLS" INNER JOIN "KEY_CONSTRAINTS" ON "TBLS"."TBL_ID" = "KEY_CONSTRAINTS"."PARENT_TBL_ID" INNER JOIN "DBS" ON "TBLS"."DB_ID" = "DBS"."DB_ID" LEFT OUTER JOIN "COLUMNS_V2" ON "COLUMNS_V2"."CD_ID" = "KEY_CONSTRAINTS"."PARENT_CD_ID" AND "COLUMNS_V2"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" LEFT OUTER JOIN "PARTITION_KEYS" ON "TBLS"."TBL_ID" = "PARTITION_KEYS"."TBL_ID" AND "PARTITION_KEYS"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" WHERE "KEY_CONSTRAINTS"."CONSTRAINT_TYPE" = 5 AND "DBS"."CTLG_NAME" = ? AND "DBS"."NAME" = ? AND "TBLS"."TBL_NAME" = ? 2018-07-21T05:26:35,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:26:35,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:26:35,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_614_2169080079780146882-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table 2018-07-21T05:26:35,702 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_614_2169080079780146882-1 2018-07-21T05:26:35,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_614_2169080079780146882-1/-ext-10000 2018-07-21T05:26:35,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: 
hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:26:35,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:26:35,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:26:35,705 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:26:35,705 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:26:35,705 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:26:35,705 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:26:35,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:26:35,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:26:35,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:26:35,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp2 is not null
2018-07-21T05:26:35,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:26:35,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp2 is not null
2018-07-21T05:26:35,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:26:35,706 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
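The optimizer announced here rearranges the plan so that rows reach the Druid file sink sorted and partitioned by a truncated timestamp; the entries that follow show it inserting SEL_5/RS_6/SEL_7 to compute the logged expressions floor_hour(__time) and floor(1.0/rand()) % 6. A standalone sketch of those two key expressions follows; it is illustrative only (the epoch constant is an arbitrary sample value, and this is not Hive's implementation):

    import java.time.Instant;
    import java.time.temporal.ChronoUnit;
    import java.util.Random;

    public class GranularityKeyDemo {
        public static void main(String[] args) {
            long epochMillis = 1532175590866L; // sample __time value

            // __time_granularity: the row timestamp truncated to the hour,
            // i.e. the floor_hour(...) expression in the log above.
            Instant timeGranularity =
                    Instant.ofEpochMilli(epochMillis).truncatedTo(ChronoUnit.HOURS);

            // __druid_extra_partition_key: floor(1.0 / rand()) % 6, a
            // non-deterministic key that spreads rows across reducers.
            Random rand = new Random();
            long extraPartitionKey =
                    (long) Math.floor(1.0 / rand.nextDouble()) % 6;

            System.out.println(timeGranularity + " / " + extraPartitionKey);
        }
    }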
2018-07-21T05:26:35,706 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [] accepted = [] method = public org.apache.hadoop.hive.serde2.io.DoubleWritable org.apache.hadoop.hive.ql.udf.UDFRand.evaluate()
2018-07-21T05:26:35,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:26:35,707 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity], Column[__druid_extra_partition_key]]
2018-07-21T05:26:35,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:26:35,707 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:26:35,708 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp2 is not null (type: boolean)
2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint:
smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:26:35,708 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp2 is not null 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp2 is not null 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp2]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately. 
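"Undeterministic" is Hive's own wording for non-deterministic: because rand() returns a different value on every call, the constant propagator must not fold the expression at compile time and instead leaves it in the plan for runtime evaluation. A minimal sketch of that folding rule follows; it is a toy, not ConstantPropagateProcFactory (Func and tryFold are invented names):

    import java.util.List;
    import java.util.function.Function;

    public class ConstantFoldDemo {
        // A function descriptor: name, determinism flag, implementation.
        record Func(String name, boolean deterministic,
                    Function<List<Double>, Double> impl) {}

        // Fold to a constant only when safe; otherwise defer to runtime.
        static Double tryFold(Func f, List<Double> constantArgs) {
            if (!f.deterministic()) {
                System.out.println(f.name()
                        + " is non-deterministic; don't evaluate immediately.");
                return null; // leave the expression in the plan
            }
            return f.impl().apply(constantArgs);
        }

        public static void main(String[] args) {
            Func floor = new Func("floor", true, xs -> Math.floor(xs.get(0)));
            Func rand  = new Func("rand", false, xs -> Math.random());

            System.out.println(tryFold(floor, List.of(1.7))); // 1.0
            System.out.println(tryFold(rand, List.of()));     // null
        }
    }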
2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6)) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7] 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key]) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: 
int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504 2018-07-21T05:26:35,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,709 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:26:35,709 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:26:35,716 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.363182ms + 0.010275ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)] 2018-07-21T05:26:35,718 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:26:35,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:26:35,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0] 2018-07-21T05:26:35,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 
avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,719 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 
Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: 
false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= 
colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: 
tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6] 2018-07-21T05:26:35,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7] 2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3] 2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,721 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1 2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:26:35,721 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:26:35,721 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
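Before generating Tez vertices, the compiler decomposes the operator graph into components (each operator above forms its own component here) and verifies the graph is acyclic. A self-contained sketch of such a check over the same pipeline follows; it is illustrative only, not TezCompiler's actual component/cycle code:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class CycleCheckDemo {
        // DFS with an "on current path" set; a back edge means a cycle.
        static boolean hasCycle(Map<String, List<String>> g, String v,
                                Set<String> visited, Set<String> onPath) {
            visited.add(v);
            onPath.add(v);
            for (String w : g.getOrDefault(v, List.of())) {
                if (onPath.contains(w)) return true;
                if (!visited.contains(w) && hasCycle(g, w, visited, onPath))
                    return true;
            }
            onPath.remove(v);
            return false;
        }

        public static void main(String[] args) {
            // The operator pipeline from the log:
            // TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
            Map<String, List<String>> dag = Map.of(
                    "TS_0",  List.of("FIL_4"),
                    "FIL_4", List.of("SEL_2"),
                    "SEL_2", List.of("SEL_5"),
                    "SEL_5", List.of("RS_6"),
                    "RS_6",  List.of("SEL_7"),
                    "SEL_7", List.of("FS_3"),
                    "FS_3",  List.of());

            boolean cycle = false;
            Set<String> visited = new HashSet<>();
            for (String v : dag.keySet())
                if (!visited.contains(v))
                    cycle |= hasCycle(dag, v, visited, new HashSet<>());
            System.out.println("Cycle free: " + !cycle); // Cycle free: true
        }
    }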
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp2]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:26:35,721 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
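
The "Offering constants []" records above show the constant-propagation pass visiting the operators in topological order (TS, FIL, SEL, RS, SEL, FS): each operator is offered the constants proven by its parents, and here the set stays empty, so the filter ctimestamp2 is not null survives unchanged and the nondeterministic rand() bridge is explicitly left unevaluated. A toy illustration of the folding idea, not Hive's ConstantPropagateProcFactory, substituting a column-to-constant map into a predicate and folding it once both sides are literals:

    import java.util.Map;

    public class ConstantFold {
        interface Expr {}
        record Col(String name) implements Expr {}
        record Lit(Object value) implements Expr {}
        record Eq(Expr left, Expr right) implements Expr {}

        // Substitute known constants, then fold subtrees that became literal-only.
        static Expr propagate(Expr e, Map<String, Object> constants) {
            if (e instanceof Col c && constants.containsKey(c.name())) {
                return new Lit(constants.get(c.name()));
            }
            if (e instanceof Eq eq) {
                Expr l = propagate(eq.left(), constants);
                Expr r = propagate(eq.right(), constants);
                if (l instanceof Lit a && r instanceof Lit b) {
                    return new Lit(a.value().equals(b.value())); // fold to a constant
                }
                return new Eq(l, r);
            }
            return e;
        }

        public static void main(String[] args) {
            Expr filter = new Eq(new Col("cint"), new Lit(42));
            // With no known constants (as in the log), the predicate is unchanged:
            System.out.println(propagate(filter, Map.<String, Object>of()));
            // If an upstream operator had proven cint = 42, the filter folds to true:
            System.out.println(propagate(filter, Map.<String, Object>of("cint", 42)));
        }
    }
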
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:26:35,722 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp2 is not null
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-26-35_614_2169080079780146882
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-26-35_614_2169080079780146882
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:26:35,723 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:26:35,723 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:26:35,724 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=2, isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=33, flushCache_()=0, getAllDatabases_()=2, getCheckConstraints_(CheckConstraintsRequest, )=1, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=10, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd); Time taken: 0.109 seconds
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd): INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_partitioned_table
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1
2018-07-21T05:26:35,724 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:DDL] in serial mode
2018-07-21T05:26:35,725 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,725 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:26:35,732 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:35,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:35,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:26:35,734 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.druid_partitioned_table newtbl=druid_partitioned_table
2018-07-21T05:26:35,734 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.druid_partitioned_table newtbl=druid_partitioned_table
2018-07-21T05:26:35,741 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:26:35,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@druid_partitioned_table) Type=TABLE WriteType=DDL_NO_LOCK because WriteEntity(default@druid_partitioned_table) Type=TABLE WriteType=INSERT is present
2018-07-21T05:26:35,757 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:DDL] in serial mode
2018-07-21T05:26:35,758 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1
2018-07-21T05:26:35,758 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:MAPRED] in serial mode
2018-07-21T05:26:35,777 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-35_614_2169080079780146882-1
2018-07-21T05:26:35,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-35_614_2169080079780146882-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:26:35,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:26:35,778 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:26:35,778 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:26:35,778 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd
2018-07-21T05:26:35,778 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:26:35,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:26:35,779 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: INSERT INTO TABLE druid_partitioned_t...NULL (Stage-2)
2018-07-21T05:26:35,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\nINSERT INTO TABLE druid_partitioned_table\nSELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp2 IS NOT NULL"}
2018-07-21T05:26:35,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:26:35,781 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:26:35,784 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 3.63KB
2018-07-21T05:26:35,795 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table
2018-07-21T05:26:35,796 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-35_614_2169080079780146882-1
2018-07-21T05:26:35,798 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:26:35,798 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:26:35,800 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.58KB
2018-07-21T05:26:35,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:26:35,814 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=INSERT INTO TABLE druid_partitioned_t...NULL (Stage-2), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd }
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:35,843 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:35,844 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741899_1075, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_5.recovery
2018-07-21T05:26:35,855 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_5.recovery for DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:26:35,861 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_5, dagName=INSERT INTO TABLE druid_partitioned_t...NULL (Stage-2)
2018-07-21T05:26:36,405 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001)
2018-07-21T05:26:36,407 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:26:36,911 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:26:37,917 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:26:38,874 WARN [NM Event dispatcher] containermanager.ContainerManagerImpl: couldn't find container container_1532175606211_0001_01_000006 while processing FINISH_CONTAINERS event
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
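
The RenderStrategy$LogToFileFunction lines print per-vertex progress as completed(+running)/total, with the parenthesized part omitted when no task is in flight, which is why the display moves from Map 1: 0/1 through Map 1: 0(+1)/1 to Map 1: 1/1. A small sketch of that formatting rule (the Vertex holder is hypothetical, not Hive's type):

    public class ProgressLine {
        record Vertex(String name, int completed, int running, int total) {}

        // Render "Name: completed(+running)/total" per vertex, space-separated.
        static String render(Vertex... vertices) {
            StringBuilder sb = new StringBuilder();
            for (Vertex v : vertices) {
                if (sb.length() > 0) sb.append(' ');
                sb.append(v.name()).append(": ").append(v.completed());
                if (v.running() > 0) sb.append("(+").append(v.running()).append(')');
                sb.append('/').append(v.total());
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(render(new Vertex("Map 1", 0, 1, 1),
                                      new Vertex("Reducer 2", 0, 0, 1)));
            // -> Map 1: 0(+1)/1 Reducer 2: 0/1
        }
    }
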
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:39,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,116 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741900_1076, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/ff0aac22a103447db870651972563519/0_descriptor.json
2018-07-21T05:26:39,128 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/ff0aac22a103447db870651972563519/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:39,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:39,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
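
The recurring NetworkTopology block is the namenode placing one replica at a time: it draws a random datanode from the rack, logs "Node ... is excluded, continuing." when the draw is already in excludeNodes, and after a successful pick adds the chosen node to the exclusion list before choosing the next replica. A stripped-down sketch of that rejection-sampling loop, illustrative rather than Hadoop's actual NetworkTopology.chooseRandom:

    import java.util.*;

    public class ReplicaChooser {
        static final Random RANDOM = new Random();

        // Draw random nodes until one is not excluded, echoing the DEBUG records.
        static String chooseRandom(List<String> nodes, Set<String> excluded) {
            if (excluded.containsAll(nodes)) throw new IllegalStateException("No node to choose.");
            while (true) {
                String candidate = nodes.get(RANDOM.nextInt(nodes.size()));
                if (excluded.contains(candidate)) {
                    System.out.println("Node " + candidate + " is excluded, continuing.");
                    continue;
                }
                return candidate;
            }
        }

        public static void main(String[] args) {
            List<String> rack = List.of("127.0.0.1:52570", "127.0.0.1:33099",
                                        "127.0.0.1:45625", "127.0.0.1:40780");
            // The writer's local node already holds replica 1, so it starts excluded.
            Set<String> excluded = new HashSet<>(List.of("127.0.0.1:52570"));
            List<String> pipeline = new ArrayList<>();
            for (int replica = 2; replica <= 3; replica++) {
                String chosen = chooseRandom(rack, excluded);
                System.out.println("chooseRandom returning " + chosen);
                pipeline.add(chosen);
                excluded.add(chosen); // each chosen node is excluded from later picks
            }
            System.out.println("pipeline: " + pipeline);
        }
    }
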
2018-07-21T05:26:39,138 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:39,138 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:39,138 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741901_1077, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_26_35.704-07_00/0_index.zip
2018-07-21T05:26:39,150 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/ff0aac22a103447db870651972563519/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,171 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,171 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,172 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:39,172 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,172 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,172 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,172 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:39,172 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:39,172 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741902_1078, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700.json
2018-07-21T05:26:39,181 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:39,595 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:39,596 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741903_1079, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/1905a863bdaa45199de2161efdd6c68b/1_index.zip
2018-07-21T05:26:39,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,604 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:39,604 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:39,604 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,604 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,604 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:39,604 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:39,604 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741904_1080, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/1905a863bdaa45199de2161efdd6c68b/1_descriptor.json
2018-07-21T05:26:39,641 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/1905a863bdaa45199de2161efdd6c68b/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,650 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/1905a863bdaa45199de2161efdd6c68b/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:39,659 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:39,659 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741905_1081, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_1.json
2018-07-21T05:26:39,672 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:39,800 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:39,801 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741906_1082, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/83425b7039ad4e6784cdf87d5bc61152/2_descriptor.json
2018-07-21T05:26:39,812 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/83425b7039ad4e6784cdf87d5bc61152/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,817 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:39,818 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:39,818 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741907_1083, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_26_35.704-07_00/2_index.zip
2018-07-21T05:26:39,828 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/83425b7039ad4e6784cdf87d5bc61152/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,835 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,835 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:39,836 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,836 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741908_1084, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_2.json
2018-07-21T05:26:39,849 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:39,936 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:39,937 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741909_1085, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/3b06ea70647a48f7b1e8ecf591588902/3_descriptor.json
2018-07-21T05:26:39,949 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/3b06ea70647a48f7b1e8ecf591588902/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:39,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:39,960 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741910_1086, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_26_35.704-07_00/3_index.zip
2018-07-21T05:26:39,971 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/3b06ea70647a48f7b1e8ecf591588902/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:39,987 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:39,988 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741911_1087, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_3.json
2018-07-21T05:26:40,003 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:40,080 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,080 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741912_1088, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/f5233c5dc095420982cbe47602abfd8a/4_descriptor.json
2018-07-21T05:26:40,098 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/f5233c5dc095420982cbe47602abfd8a/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
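
The files written under segmentsDescriptorDir encode the Druid segment identifier in their names: data source, interval start, interval end, version timestamp, and a shard suffix for partitions past the first, with the ':' separators dropped from the timestamps so the id is a safe path component. A hedged sketch of composing such a name; the exact convention is inferred from the paths in this log:

    import java.time.OffsetDateTime;
    import java.time.format.DateTimeFormatter;

    public class SegmentDescriptorName {
        // Build "<dataSource>_<start>_<end>_<version>[_<partition>].json",
        // using a colon-free time format so the name is a single path component.
        static String descriptorFileName(String dataSource, OffsetDateTime start,
                                         OffsetDateTime end, OffsetDateTime version,
                                         int partitionNum) {
            DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HHmmss.SSSXX");
            String base = String.join("_", dataSource,
                    fmt.format(start), fmt.format(end), fmt.format(version));
            if (partitionNum > 0) base += "_" + partitionNum; // shard 0 gets no suffix
            return base + ".json";
        }

        public static void main(String[] args) {
            System.out.println(descriptorFileName(
                    "default.druid_partitioned_table",
                    OffsetDateTime.parse("1969-12-31T23:00:00Z"),
                    OffsetDateTime.parse("1970-01-01T00:00:00Z"),
                    OffsetDateTime.parse("2018-07-21T05:26:35.704-07:00"),
                    3));
            // -> default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_3.json
        }
    }
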
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:40,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,107 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741913_1089, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_26_35.704-07_00/4_index.zip
2018-07-21T05:26:40,115 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/f5233c5dc095420982cbe47602abfd8a/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,129 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,130 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741914_1090, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_4.json
2018-07-21T05:26:40,140 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:26:40,212 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,212 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741915_1091, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/5076bb62a89e4e4e9d32d4649fbbcf15/5_descriptor.json
2018-07-21T05:26:40,224 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/5076bb62a89e4e4e9d32d4649fbbcf15/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:40,228 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,229 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741916_1092, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_26_35.704-07_00/5_index.zip
2018-07-21T05:26:40,239 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/5076bb62a89e4e4e9d32d4649fbbcf15/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,248 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,248 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741917_1093, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_5.json
2018-07-21T05:26:40,264 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052635.704-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:40,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,313 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741918_1094, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/d4721050f3fc43f0902110c52bd4819c/0_descriptor.json
2018-07-21T05:26:40,334 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/d4721050f3fc43f0902110c52bd4819c/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:40,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,339 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741919_1095, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_26_35.704-07_00/0_index.zip
2018-07-21T05:26:40,350 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/d4721050f3fc43f0902110c52bd4819c/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,359 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,359 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,360 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,360 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741920_1096, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700.json
2018-07-21T05:26:40,374 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:40,506 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,506 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741921_1097, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/53d9b1c63a674d109ec72eb921a48932/1_descriptor.json
2018-07-21T05:26:40,530 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/53d9b1c63a674d109ec72eb921a48932/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:40,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,539 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741922_1098, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_26_35.704-07_00/1_index.zip
2018-07-21T05:26:40,565 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/53d9b1c63a674d109ec72eb921a48932/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:26:40,574 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,574 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741923_1099, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_1.json
2018-07-21T05:26:40,591 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
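Each BLOCK* allocate / DIR* completeFile pair in this stretch is the NameNode's side of the reducer writing one small descriptor or index file into the staging directory: the client's create() call leads to a block allocation with three replicas, and close() completes the file. A minimal client-side sketch using the standard org.apache.hadoop.fs API follows; the path and JSON payload are placeholders for the sketch, not the test's real values.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.nio.charset.StandardCharsets;

public class DescriptorWriter {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // picks up fs.defaultFS and dfs.replication (3 here)
        FileSystem fs = FileSystem.get(conf);
        Path p = new Path("/tmp/druidStagingDir/segmentsDescriptorDir/example_descriptor.json"); // hypothetical
        try (FSDataOutputStream out = fs.create(p, true)) {
            // The first write past the client buffer triggers the NameNode's "BLOCK* allocate" with 3 replicas.
            out.write("{\"segment\":\"example\"}".getBytes(StandardCharsets.UTF_8));
        } // close() finalizes the last block; the NameNode logs "DIR* completeFile"
    }
}

Because every file written here is far smaller than one HDFS block, each write costs exactly one allocate/completeFile round trip, which is why the two messages alternate in lockstep through this part of the log.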
2018-07-21T05:26:40,677 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,678 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741924_1100, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/a22679f9649443bb977c6bd267cf7639/2_descriptor.json
2018-07-21T05:26:40,689 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/a22679f9649443bb977c6bd267cf7639/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:40,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,696 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741925_1101, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_26_35.704-07_00/2_index.zip
2018-07-21T05:26:40,704 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/a22679f9649443bb977c6bd267cf7639/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,711 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,711 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,712 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:40,712 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,712 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,712 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,712 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:40,712 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,712 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741926_1102, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_2.json
2018-07-21T05:26:40,721 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,786 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,786 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741927_1103, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/f69f90c112d64eceb1d1050b85e22ad7/3_descriptor.json
2018-07-21T05:26:40,796 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/f69f90c112d64eceb1d1050b85e22ad7/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:40,801 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,801 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741928_1104, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_26_35.704-07_00/3_index.zip
2018-07-21T05:26:40,810 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/f69f90c112d64eceb1d1050b85e22ad7/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,819 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,819 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,819 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,819 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:40,820 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,820 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741929_1105, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_3.json
2018-07-21T05:26:40,829 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:40,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,911 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741930_1106, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/4073e330257d41c18febac3bb3057edb/4_descriptor.json
2018-07-21T05:26:40,933 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:26:40,939 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/4073e330257d41c18febac3bb3057edb/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:40,944 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:40,944 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741931_1107, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_26_35.704-07_00/4_index.zip
2018-07-21T05:26:40,957 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/4073e330257d41c18febac3bb3057edb/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:40,967 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:40,967 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741932_1108, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_4.json
2018-07-21T05:26:40,994 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:41,038 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,038 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741933_1109, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/8a8e04d06404408889bb1423eae2326f/5_descriptor.json
2018-07-21T05:26:41,049 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/8a8e04d06404408889bb1423eae2326f/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:41,054 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:41,054 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741934_1110, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_26_35.704-07_00/5_index.zip
2018-07-21T05:26:41,073 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/intermediateSegmentDir/default.druid_partitioned_table/8a8e04d06404408889bb1423eae2326f/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:41,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,082 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741935_1111, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_5.json
2018-07-21T05:26:41,090 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052635.704-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-1058559785_52
2018-07-21T05:26:41,115 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_5.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:26:41,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:41,117 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1
2018-07-21T05:26:41,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:26:41,119 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode
2018-07-21T05:26:41,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: commit insert into table druid_partitioned_table overwrite false
2018-07-21T05:26:41,161 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Moving [12] Druid segments from staging directory [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd] to Deep storage [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage]
2018-07-21T05:26:41,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:26:41,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:26:41,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:26:41,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:26:41,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:26:41,162 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hdfs.HdfsDataSegmentPusher: Configured HDFS as deep storage
2018-07-21T05:26:41,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:26:41,163 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
2018-07-21T05:26:41,170 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Building timeline for umbrella Interval [1969-12-31T23:00:00.000Z/1970-01-01T01:00:00.000Z]
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:26:41,217 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
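The Stage-4 DDL task logged above is DruidStorageHandler's commit: the 12 staged segments are pushed from the staging directory into HDFS deep storage and registered in the Derby metadata store, after which the timeline for the umbrella interval is rebuilt to decide which segment versions are visible. A rough sketch of the file-move half of that step follows, using only the public FileSystem API; the class name SegmentMover is hypothetical, and this is an illustration of the operation, not Hive's actual implementation.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SegmentMover {
    // Recursively move staged segment files under the deep-storage root,
    // mirroring the "Moving [12] Druid segments" step above. Metadata
    // registration in Derby is a separate step, omitted here.
    static void moveAll(FileSystem fs, Path staging, Path deepStorage) throws java.io.IOException {
        for (FileStatus st : fs.listStatus(staging)) {
            Path target = new Path(deepStorage, st.getPath().getName());
            if (st.isDirectory()) {
                fs.mkdirs(target);
                moveAll(fs, st.getPath(), target);
            } else if (!fs.rename(st.getPath(), target)) {
                throw new java.io.IOException("rename failed: " + st.getPath());
            }
        }
    }

    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Paths shortened for the sketch; the real ones are the staging and deep-storage dirs logged above.
        moveAll(fs, new Path("/tmp/druidStagingDir/.staging-example"), new Path("/tmp/druid-data/deep-storage"));
    }
}

A rename within one HDFS filesystem is a metadata-only operation on the NameNode, so the subsequent allocate/completeFile traffic below comes from the rewritten descriptor.json files, not from re-copying the segment data.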
2018-07-21T05:26:41,218 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,218 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741936_1112, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,230 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,240 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,240 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,240 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:41,241 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,241 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,241 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,241 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:41,241 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,241 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741937_1113, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,251 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:26:41,262 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:26:41,262 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741938_1114, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:26:41,275 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:26:41,288 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:26:41,288 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:26:41,288 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:26:41,289 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:26:41,289 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:26:41,289 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:26:41,289 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:26:41,289 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:26:41,289 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:26:41,289 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741939_1115, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:26:41,304 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:26:41,321 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:26:41,321 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741940_1116, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json 2018-07-21T05:26:41,336 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:26:41,346 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:26:41,346 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
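[Editor's aside] Each "BLOCK* allocate ... / DIR* completeFile" pair above is one complete HDFS write of a small descriptor.json: the client's create() leads to block allocation on the NameNode, and its close() is what triggers completeFile. A hedged sketch of that client-side sequence, using an example path and payload rather than the test's actual ones, and assuming fs.defaultFS points at the cluster:

```java
// Sketch only: write a small JSON file to HDFS. close() (via
// try-with-resources) is the step the NameNode logs as
// "DIR* completeFile: ... is closed by DFSClient_...".
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DescriptorWriter {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path descriptor = new Path("/druid-data/deep-storage/example/descriptor.json");
        try (FSDataOutputStream out = fs.create(descriptor, true /* overwrite */)) {
            out.write("{\"dataSource\":\"default.druid_partitioned_table\"}"
                    .getBytes(StandardCharsets.UTF_8));
        } // close() completes the file on the NameNode
    }
}
```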
2018-07-21T05:26:41,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:41,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:26:41,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,347 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741941_1117, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,359 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,375 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,375 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,375 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:41,375 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,375 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,375 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,376 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:41,376 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:26:41,376 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,376 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741942_1118, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,392 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:26:41,409 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,409 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741943_1119, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,418 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:26:41,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:26:41,427 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741944_1120, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,437 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,444 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:26:41,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,445 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741945_1121, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,454 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:26:41,462 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,463 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741946_1122, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,471 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
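[Editor's aside] The repeated "Node ... is excluded, continuing." chatter above comes from the replica chooser retrying random picks that land in the exclude set (nodes that already hold a replica of the block). This is a toy model of that loop, not Hadoop's actual NetworkTopology code; the node addresses are copied from the log:

```java
// Toy sketch of exclusion-based random selection: retry while the random
// pick is excluded ("Node ... is excluded, continuing."), return the first
// non-excluded pick ("chooseRandom returning ..."), or null when every
// node is excluded ("No node to choose.").
import java.util.*;

public class ChooseRandomSketch {
    static String chooseRandom(List<String> rackNodes, Set<String> excluded, Random rnd) {
        List<String> candidates = new ArrayList<>(rackNodes);
        candidates.removeAll(excluded);
        if (candidates.isEmpty()) return null; // "No node to choose."
        while (true) {
            String pick = rackNodes.get(rnd.nextInt(rackNodes.size()));
            if (excluded.contains(pick)) continue; // excluded, continuing
            return pick;
        }
    }

    public static void main(String[] args) {
        List<String> rack = List.of("127.0.0.1:40780", "127.0.0.1:52570",
                                    "127.0.0.1:45625", "127.0.0.1:33099");
        System.out.println(chooseRandom(rack, Set.of("127.0.0.1:40780"), new Random()));
    }
}
```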
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:26:41,479 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:26:41,480 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741947_1123, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json
2018-07-21T05:26:41,488 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_23_40.096-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11
2018-07-21T05:26:41,490 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6
2018-07-21T05:26:41,491 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7
2018-07-21T05:26:41,491 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8
2018-07-21T05:26:41,491 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9
2018-07-21T05:26:41,491 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10
2018-07-21T05:26:41,491 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11
2018-07-21T05:26:41,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: checking load status from coordinator localhost:8081
2018-07-21T05:26:41,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/status] starting
2018-07-21T05:26:41,503 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,503 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/status] Got response: 200 OK
2018-07-21T05:26:41,504 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@73d753c4
2018-07-21T05:26:41,504 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 408B, last=false
2018-07-21T05:26:41,504 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf
2018-07-21T05:26:41,504 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 0B, last=true
2018-07-21T05:26:41,507 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Waiting for the loading of [12] segments
2018-07-21T05:26:41,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting
2018-07-21T05:26:41,509 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,509 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content
2018-07-21T05:26:41,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is []
2018-07-21T05:26:41,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting
2018-07-21T05:26:41,513 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,513 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content
2018-07-21T05:26:41,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is []
2018-07-21T05:26:41,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting
2018-07-21T05:26:41,518 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,518 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content
2018-07-21T05:26:41,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is []
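[Editor's aside] The pattern from here to the end of the section is the storage handler polling the coordinator for each of the 12 published segments: a GET against /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} that keeps returning 204 No Content with an empty body (logged as "response is []") until the segment is loaded. A sketch of such a poll loop, using plain HttpURLConnection instead of the NettyHttpClient seen here; the 30-second sleep matches the observed poll rounds (05:26:41, 05:27:11, 05:27:41) but is an assumption, not a quoted setting:

```java
// Sketch only: poll the Druid coordinator's segment endpoint until it
// returns 200 (segment served) rather than 204 No Content (not yet loaded).
import java.net.HttpURLConnection;
import java.net.URL;

public class SegmentLoadPoll {
    static boolean isLoaded(String coordinator, String dataSource, String segmentId)
            throws Exception {
        URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
                + dataSource + "/segments/" + segmentId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        int code = conn.getResponseCode();
        conn.disconnect();
        return code == 200; // 204 => coordinator does not serve it yet
    }

    public static void main(String[] args) throws Exception {
        String seg = "default.druid_partitioned_table_1969-12-31T23:00:00.000Z_"
                + "1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11";
        while (!isLoaded("http://localhost:8081", "default.druid_partitioned_table", seg)) {
            Thread.sleep(30_000); // matches the observed ~30 s poll cadence
        }
    }
}
```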
2018-07-21T05:26:41,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting
2018-07-21T05:26:41,523 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,523 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content
2018-07-21T05:26:41,526 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is []
2018-07-21T05:26:41,526 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting
2018-07-21T05:26:41,528 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,528 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content
2018-07-21T05:26:41,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is []
2018-07-21T05:26:41,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting
2018-07-21T05:26:41,533 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,533 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content
2018-07-21T05:26:41,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is []
2018-07-21T05:26:41,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting
2018-07-21T05:26:41,540 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,540 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content
2018-07-21T05:26:41,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is []
2018-07-21T05:26:41,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting
2018-07-21T05:26:41,551 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,552 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content
2018-07-21T05:26:41,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is []
2018-07-21T05:26:41,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting
2018-07-21T05:26:41,561 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,561 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content
2018-07-21T05:26:41,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is []
2018-07-21T05:26:41,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting
2018-07-21T05:26:41,565 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,565 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content
2018-07-21T05:26:41,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is []
2018-07-21T05:26:41,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting
2018-07-21T05:26:41,569 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,569 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content
2018-07-21T05:26:41,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is []
2018-07-21T05:26:41,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting
2018-07-21T05:26:41,572 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:26:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:26:41,572 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content
2018-07-21T05:26:41,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is []
2018-07-21T05:26:46,364 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:26:46,387 WARN [ContainersLauncher #2] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0001_01_000005 is : 143
2018-07-21T05:26:46,389 DEBUG [ContainersLauncher #2] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #2, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:26:48,502 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,503 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,506 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:48,515 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:26:49,234 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:26:49,297 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:27:11,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting
2018-07-21T05:27:11,577 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,577 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content
2018-07-21T05:27:11,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is []
2018-07-21T05:27:11,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting
2018-07-21T05:27:11,582 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,582 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content
2018-07-21T05:27:11,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is []
2018-07-21T05:27:11,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting
2018-07-21T05:27:11,585 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,585 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content
2018-07-21T05:27:11,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is []
2018-07-21T05:27:11,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting
2018-07-21T05:27:11,590 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,590 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content
2018-07-21T05:27:11,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is []
2018-07-21T05:27:11,592 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting
2018-07-21T05:27:11,594 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,594 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content
2018-07-21T05:27:11,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is []
2018-07-21T05:27:11,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting
2018-07-21T05:27:11,611 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,611 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content
2018-07-21T05:27:11,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is []
2018-07-21T05:27:11,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting
2018-07-21T05:27:11,615 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,615 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content
2018-07-21T05:27:11,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is []
2018-07-21T05:27:11,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting
2018-07-21T05:27:11,618 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,618 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content
2018-07-21T05:27:11,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is []
2018-07-21T05:27:11,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting
2018-07-21T05:27:11,621 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,621 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content
2018-07-21T05:27:11,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is []
2018-07-21T05:27:11,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting
2018-07-21T05:27:11,626 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,626 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content
2018-07-21T05:27:11,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is []
2018-07-21T05:27:11,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting
2018-07-21T05:27:11,629 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,629 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content
2018-07-21T05:27:11,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is []
2018-07-21T05:27:11,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting
2018-07-21T05:27:11,633 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:11,633 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content
2018-07-21T05:27:11,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is []
2018-07-21T05:27:19,234 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:27:19,297 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:27:41,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting
2018-07-21T05:27:41,638 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:27:41,638 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content 2018-07-21T05:27:41,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is [] 2018-07-21T05:27:41,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting 2018-07-21T05:27:41,642 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,642 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content 2018-07-21T05:27:41,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is [] 2018-07-21T05:27:41,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting 2018-07-21T05:27:41,647 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,647 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content 2018-07-21T05:27:41,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is [] 2018-07-21T05:27:41,649 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting 2018-07-21T05:27:41,650 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,650 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content 2018-07-21T05:27:41,651 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is [] 2018-07-21T05:27:41,652 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting 2018-07-21T05:27:41,654 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,654 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content 2018-07-21T05:27:41,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is [] 2018-07-21T05:27:41,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting 2018-07-21T05:27:41,658 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: 
DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,658 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content 2018-07-21T05:27:41,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is [] 2018-07-21T05:27:41,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting 2018-07-21T05:27:41,661 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,661 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content 2018-07-21T05:27:41,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is [] 2018-07-21T05:27:41,663 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting 2018-07-21T05:27:41,668 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,668 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content 2018-07-21T05:27:41,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is [] 2018-07-21T05:27:41,671 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting 2018-07-21T05:27:41,673 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,673 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content 2018-07-21T05:27:41,673 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is [] 2018-07-21T05:27:41,674 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting 2018-07-21T05:27:41,676 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,676 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content 2018-07-21T05:27:41,676 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is [] 2018-07-21T05:27:41,678 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting 2018-07-21T05:27:41,679 DEBUG [HttpClient-Netty-Worker-7] 
client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,679 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content 2018-07-21T05:27:41,679 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is [] 2018-07-21T05:27:41,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting 2018-07-21T05:27:41,683 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:27:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:27:41,683 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content 2018-07-21T05:27:41,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is [] 2018-07-21T05:27:49,235 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:27:49,298 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:28:11,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting 2018-07-21T05:28:11,691 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 
Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,691 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content 2018-07-21T05:28:11,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is [] 2018-07-21T05:28:11,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting 2018-07-21T05:28:11,695 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,695 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content 2018-07-21T05:28:11,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is [] 2018-07-21T05:28:11,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting 2018-07-21T05:28:11,699 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,699 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content 2018-07-21T05:28:11,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is [] 2018-07-21T05:28:11,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting 2018-07-21T05:28:11,702 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,702 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content 2018-07-21T05:28:11,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is [] 2018-07-21T05:28:11,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting 2018-07-21T05:28:11,705 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,705 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content 2018-07-21T05:28:11,705 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is [] 2018-07-21T05:28:11,707 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting 2018-07-21T05:28:11,709 DEBUG [HttpClient-Netty-Worker-14] 
client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,709 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content 2018-07-21T05:28:11,709 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is [] 2018-07-21T05:28:11,710 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting 2018-07-21T05:28:11,712 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,712 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content 2018-07-21T05:28:11,713 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is [] 2018-07-21T05:28:11,715 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting 2018-07-21T05:28:11,716 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,716 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content 2018-07-21T05:28:11,716 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is [] 2018-07-21T05:28:11,724 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting 2018-07-21T05:28:11,725 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,725 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content 2018-07-21T05:28:11,725 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is [] 2018-07-21T05:28:11,727 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting 2018-07-21T05:28:11,729 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,729 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content 2018-07-21T05:28:11,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is [] 2018-07-21T05:28:11,731 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting 2018-07-21T05:28:11,733 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,733 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content 2018-07-21T05:28:11,733 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is [] 2018-07-21T05:28:11,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting 2018-07-21T05:28:11,737 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:11 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:11,737 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content 2018-07-21T05:28:11,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is [] 2018-07-21T05:28:19,235 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:28:19,298 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:28:41,739 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting 2018-07-21T05:28:41,742 DEBUG 
[HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,742 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content 2018-07-21T05:28:41,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is [] 2018-07-21T05:28:41,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting 2018-07-21T05:28:41,747 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,747 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content 2018-07-21T05:28:41,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is [] 2018-07-21T05:28:41,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] starting 2018-07-21T05:28:41,751 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,751 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] Got response: 204 No Content 2018-07-21T05:28:41,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_11] response is [] 2018-07-21T05:28:41,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting 2018-07-21T05:28:41,754 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,754 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content 2018-07-21T05:28:41,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is [] 2018-07-21T05:28:41,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting 2018-07-21T05:28:41,762 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,762 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content 2018-07-21T05:28:41,762 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is [] 2018-07-21T05:28:41,765 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] starting 2018-07-21T05:28:41,766 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,766 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] Got response: 204 No Content 2018-07-21T05:28:41,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_6] response is [] 2018-07-21T05:28:41,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting 2018-07-21T05:28:41,771 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,771 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content 2018-07-21T05:28:41,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is [] 2018-07-21T05:28:41,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] starting 2018-07-21T05:28:41,778 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] messageReceived: 
DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,778 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] Got response: 204 No Content 2018-07-21T05:28:41,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_7] response is [] 2018-07-21T05:28:41,784 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting 2018-07-21T05:28:41,785 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,785 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content 2018-07-21T05:28:41,786 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is [] 2018-07-21T05:28:41,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] starting 2018-07-21T05:28:41,789 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,790 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] Got response: 204 No Content 2018-07-21T05:28:41,790 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_8] response is [] 2018-07-21T05:28:41,791 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] starting 2018-07-21T05:28:41,793 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,793 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] Got response: 204 No Content 2018-07-21T05:28:41,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:23:40.096-07:00_9] response is [] 2018-07-21T05:28:41,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] starting 2018-07-21T05:28:41,797 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:28:41 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:28:41,797 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] Got response: 204 No Content 2018-07-21T05:28:41,798 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:23:40.096-07:00_10] response is [] 2018-07-21T05:28:49,235 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:28:49,298 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:29:11,798 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: 
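The DEBUG traffic above is the storage handler's passive wait: after pushing the Druid segments, DruidStorageHandler issues one coordinator GET per segment, treats the 204 / empty-body answer ("response is []") as "not loaded yet", sleeps about 30 seconds (in Hive this pause is configurable, e.g. via hive.druid.passiveWaitTimeMs), and retries until its wait budget runs out, at which point it logs the ERROR above without failing the statement. A minimal sketch of such a loop, assuming only the endpoint shape visible in this log -- SegmentLoadWait, COORDINATOR, pollMs and maxWaitMs are illustrative names, not Hive's actual implementation:

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Sketch only: polls the coordinator segment endpoint seen in the log
    // until every segment is served or the wait budget is exhausted.
    public class SegmentLoadWait {

        private static final String COORDINATOR = "http://localhost:8081";

        // Returns true once the coordinator serves metadata for the segment
        // (HTTP 200); a 204 with an empty body means "not loaded yet".
        static boolean isLoaded(String dataSource, String segmentId) throws IOException {
            URL url = new URL(COORDINATOR + "/druid/coordinator/v1/datasources/"
                    + dataSource + "/segments/" + segmentId);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try {
                return conn.getResponseCode() == 200;
            } finally {
                conn.disconnect();
            }
        }

        static void waitForSegments(String dataSource, List<String> segmentIds,
                                    long pollMs, long maxWaitMs) throws Exception {
            Set<String> pending = new HashSet<>(segmentIds);
            long deadline = System.currentTimeMillis() + maxWaitMs;
            while (!pending.isEmpty() && System.currentTimeMillis() < deadline) {
                // Drop every segment the coordinator now reports as loaded.
                pending.removeIf(id -> {
                    try {
                        return isLoaded(dataSource, id);
                    } catch (IOException e) {
                        return false; // treat transport errors as "not loaded yet"
                    }
                });
                if (!pending.isEmpty()) {
                    Thread.sleep(pollMs); // the log shows ~30 s between passes
                }
            }
            if (!pending.isEmpty()) {
                System.err.printf(
                    "Wait time exhausted and we have [%d] out of [%d] segments not loaded yet%n",
                    pending.size(), segmentIds.size());
            }
        }
    }

Run with pollMs=30000 and the 12 segment ids polled above, this reproduces exactly the cadence and the final error message in the log.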
Wait time exhausted and we have [12] out of [12] segments not loaded yet 2018-07-21T05:29:11,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,804 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:29:11,804 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:29:11,804 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc 2018-07-21T05:29:11,804 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@druid_partitioned_table 2018-07-21T05:29:11,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=1, alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=23, getTable_(String, String, )=8} 2018-07-21T05:29:11,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052635_6fbeb87c-52ee-4c81-a73d-861cedd046cd); Time taken: 156.085 seconds 2018-07-21T05:29:11,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query INSERT INTO TABLE druid_partitioned_table SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:29:11,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-26-35_614_2169080079780146882-1 2018-07-21T05:29:11,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: 
hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-26-35_614_2169080079780146882-1 2018-07-21T05:29:11,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 156.195 seconds 2018-07-21T05:29:11,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:29:11,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:29:11,813 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:29:11,813 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:29:11,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 2018-07-21T05:29:11,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:29:11,814 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052911_47d7a7c4-bee4-432b-a5cf-70412222f916): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:29:11,815 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,815 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:29:11,815 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,816 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:29:11,816 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum 2018-07-21T05:29:11,816 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:29:11,816 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:29:11,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,816 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,817 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest 
ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,826 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:29:11,826 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:29:11,829 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1 2018-07-21T05:29:11,829 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:29:11,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:29:11,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,832 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,832 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,834 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,834 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 
2018-07-21T05:29:11,835 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table
2018-07-21T05:29:11,835 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table
2018-07-21T05:29:11,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:29:11,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:29:11,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:29:11,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:29:11,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:29:11,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:29:11,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:29:11,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:29:11,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:29:11,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:29:11,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:29:11,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:29:11,862 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:29:11,862 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:29:11,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:29:11,865 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:29:11,865 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:29:11,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:29:11,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:29:11,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after top-level introduceDerivedTable
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:29:11,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:29:11,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:29:11,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:29:11,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(_c0=[$0], _c1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:29:11,886 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:29:11,886 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:11,886 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:11,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,897 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:29:11,897 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:29:11,900 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1
2018-07-21T05:29:11,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:29:11,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for druid_partitioned_table TS[0]
2018-07-21T05:29:11,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:29:11,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (. (tok_table_or_col druid_partitioned_table) $f0) _c0) (tok_selexpr (. (tok_table_or_col druid_partitioned_table) $f1) _c1))
2018-07-21T05:29:11,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:29:11,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:29:11,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:29:11,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001/.hive-staging_hive_2018-07-21_05-29-11_814_4990282370497482265-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001
2018-07-21T05:29:11,901 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001/.hive-staging_hive_2018-07-21_05-29-11_814_4990282370497482265-1
2018-07-21T05:29:11,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001/.hive-staging_hive_2018-07-21_05-29-11_814_4990282370497482265-1/-ext-10003
2018-07-21T05:29:11,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001 row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:29:11,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:29:11,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:29:11,904 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:29:11,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-SEL[1]-FS[2]
2018-07-21T05:29:11,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:29:11,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(2)
2018-07-21T05:29:11,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(1)
2018-07-21T05:29:11,905 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:29:11,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-SEL[1]-FS[2]
2018-07-21T05:29:11,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-SEL[1]-LIST_SINK[3]
2018-07-21T05:29:11,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:29:11,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:29:11,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:29:11,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:29:11,906 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:$f0, type:bigint, comment:null), FieldSchema(name:$f1, type:bigint, comment:null)], properties:null)
2018-07-21T05:29:11,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:29:11,908 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing operator TS[0]
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Operator 0 TS initialized
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing children of 0 TS
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing child 1 SEL
2018-07-21T05:29:11,908 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing operator SEL[1]
2018-07-21T05:29:11,908 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: SELECT struct<$f0:bigint,$f1:bigint>
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Operator 1 SEL initialized
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing children of 1 SEL
2018-07-21T05:29:11,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing child 3 LIST_SINK
2018-07-21T05:29:11,908 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing operator LIST_SINK[3]
2018-07-21T05:29:11,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK
2018-07-21T05:29:11,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Operator 3 LIST_SINK initialized
2018-07-21T05:29:11,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK done is reset.
2018-07-21T05:29:11,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL done is reset.
2018-07-21T05:29:11,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS done is reset.
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:29:11,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=20, flushCache_()=0, getAllDatabases_()=3, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052911_47d7a7c4-bee4-432b-a5cf-70412222f916); Time taken: 0.095 seconds
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052911_47d7a7c4-bee4-432b-a5cf-70412222f916): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@druid_partitioned_table
2018-07-21T05:29:11,909 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@druid_partitioned_table
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:29:11,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {}
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052911_47d7a7c4-bee4-432b-a5cf-70412222f916); Time taken: 0.001 seconds
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:29:11,910 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:29:11,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:29:11,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<$f0:bigint,$f1:bigint>
2018-07-21T05:29:11,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties:
table properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table, last_modified_by=hive_test_user, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532175995, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, last_modified_time=1532175995, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=6, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table}
partition properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table, last_modified_by=hive_test_user, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532175995, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, last_modified_time=1532175995, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=6, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table}
2018-07-21T05:29:11,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidQueryRecordReader: Retrieving data from druid using query: TimeseriesQuery{dataSource='default.druid_partitioned_table', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}}
2018-07-21T05:29:11,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] starting
2018-07-21T05:29:11,922 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: DefaultHttpResponse(chunked: true)
HTTP/1.1 200 OK
Date: Sat, 21 Jul 2018 12:29:11 GMT
Content-Type: application/x-jackson-smile
X-Druid-Query-Id: 17192f2e-cbe7-44fb-854f-225d3cea6cff
X-Druid-Response-Context: {}
Vary: Accept-Encoding, User-Agent
Transfer-Encoding: chunked
Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:11,922 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got response: 200 OK
2018-07-21T05:29:11,922 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@3cda119f
2018-07-21T05:29:11,922 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 6B, last=false
2018-07-21T05:29:11,922 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf
2018-07-21T05:29:11,922 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 0B, last=true
2018-07-21T05:29:11,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: close called for operator TS[0]
2018-07-21T05:29:11,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing operator TS[0]
2018-07-21T05:29:11,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_TS_0:0,
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing child = SEL[1]
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: close called for operator SEL[1]
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: allInitializedParentsAreClosed? parent.state = CLOSE
2018-07-21T05:29:11,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing operator SEL[1]
2018-07-21T05:29:11,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_SEL_1:0,
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing child = LIST_SINK[3]
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: close called for operator LIST_SINK[3]
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: allInitializedParentsAreClosed? parent.state = CLOSE
2018-07-21T05:29:11,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Closing operator LIST_SINK[3]
2018-07-21T05:29:11,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_LIST_SINK_3:0,
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: 3 Close done
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 1 Close done
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: 0 Close done
2018-07-21T05:29:11,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001
2018-07-21T05:29:11,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1
2018-07-21T05:29:11,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_814_4990282370497482265-1/-mr-10001/.hive-staging_hive_2018-07-21_05-29-11_814_4990282370497482265-1
2018-07-21T05:29:11,930 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.096 seconds
2018-07-21T05:29:11,930 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:29:11,930 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:29:11,930 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:29:11,930 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:29:11,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:29:11,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:29:11,932 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052911_7a1c6fed-ffbc-4bd1-bd5d-d223901f5cc5): EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:29:11,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:29:11,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:29:11,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,938 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:29:11,938 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:29:11,938 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:29:11,938 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:29:11,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,939 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,939 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,947 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,948 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:29:11,948 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:29:11,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,949 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,949 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:11,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,959 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:29:11,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:11,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,962 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,962 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,964 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,964 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:11,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:11,966 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,966 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:29:11,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection 
2018-07-21T05:29:11,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:29:11,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:29:11,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:29:11,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:29:11,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:29:11,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:11,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:11,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:11,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:11,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11])
        HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
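The subquery-removal and decorrelation dumps above are identical because this query has neither subqueries nor correlations to rewrite; only the field-trimming step changes the shape, pushing an inner HiveProject below the filter so that ctimestamp2 and the virtual columns are never read. Read bottom-up, the trimmed plan corresponds roughly to this nested query (an orientation sketch, not tool output; $8 is ctimestamp1 in the inner projection):

    SELECT cast(t.ctimestamp1 as timestamp with local time zone) AS `__time`,  -- CAST($8)
           t.cstring1, t.cstring2, t.cdouble, t.cfloat, t.ctinyint,
           t.csmallint, t.cint, t.cbigint, t.cboolean1, t.cboolean2
    FROM (SELECT ctinyint, csmallint, cint, cbigint, cfloat, cdouble,
                 cstring1, cstring2, ctimestamp1, cboolean1, cboolean2
          FROM alltypesorc) t              -- inner HiveProject: trimmed field list
    WHERE t.ctimestamp1 IS NOT NULL;       -- HiveFilter(IS NOT NULL($8))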
2018-07-21T05:29:11,996 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:29:11,996 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:29:11,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:29:11,997 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:29:11,998 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:29:11,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:11,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:29:12,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
  HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11])
    HiveFilter(condition=[IS NOT NULL($8)])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,014 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:29:12,014 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,014 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
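The get_materialized_views_for_rewriting call above came back empty (filterListCmdObjects []), so no materialized-view rewrite is attempted. The same check can be made by hand in a Hive 3+ session against this metastore (illustrative):

    SHOW MATERIALIZED VIEWS;   -- empty here: nothing is eligible for rewriting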
2018-07-21T05:29:12,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,023 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:29:12,024 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:29:12,024 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,024 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,031 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:29:12,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:29:12,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
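These get_table round trips fetch the same Table objects a session can inspect directly; for example (assuming a CLI connected to this warehouse):

    DESCRIBE FORMATTED default.alltypesorc;              -- ORC source table
    DESCRIBE FORMATTED default.druid_partitioned_table;  -- Druid-backed target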
2018-07-21T05:29:12,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2))
2018-07-21T05:29:12,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:29:12,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:29:12,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:29:12,034 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:29:12,034 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,034 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,035 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_check_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,035 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_check_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getCheckConstraints: directsql : SELECT "DBS"."NAME", "TBLS"."TBL_NAME",CASE WHEN "COLUMNS_V2"."COLUMN_NAME" IS NOT NULL THEN "COLUMNS_V2"."COLUMN_NAME" ELSE "PARTITION_KEYS"."PKEY_NAME" END, "KEY_CONSTRAINTS"."CONSTRAINT_NAME", "KEY_CONSTRAINTS"."ENABLE_VALIDATE_RELY", "KEY_CONSTRAINTS"."DEFAULT_VALUE" from "TBLS" INNER JOIN "KEY_CONSTRAINTS" ON "TBLS"."TBL_ID" = "KEY_CONSTRAINTS"."PARENT_TBL_ID" INNER JOIN "DBS" ON "TBLS"."DB_ID" = "DBS"."DB_ID" LEFT OUTER JOIN "COLUMNS_V2" ON "COLUMNS_V2"."CD_ID" = "KEY_CONSTRAINTS"."PARENT_CD_ID" AND "COLUMNS_V2"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" LEFT OUTER JOIN "PARTITION_KEYS" ON "TBLS"."TBL_ID" = "PARTITION_KEYS"."TBL_ID" AND "PARTITION_KEYS"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" WHERE "KEY_CONSTRAINTS"."CONSTRAINT_TYPE" = 5 AND "DBS"."CTLG_NAME" = ? AND "DBS"."NAME" = ? AND "TBLS"."TBL_NAME" = ?
2018-07-21T05:29:12,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:29:12,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:29:12,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-11_932_7802309835682783064-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table
2018-07-21T05:29:12,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-11_932_7802309835682783064-1/-ext-10000
2018-07-21T05:29:12,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
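The DruidSerDe initialization above pins down the target table's column list and types. A DDL consistent with it would look roughly as follows; this is a reconstruction from the log, not the test's actual init script, and the TBLPROPERTIES value is illustrative (HOUR matches the floor_hour bucketing seen later in the plan):

    CREATE TABLE druid_partitioned_table (
      `__time`  timestamp with local time zone,
      cstring1  string,
      cstring2  string,
      cdouble   double,
      cfloat    float,
      ctinyint  tinyint,
      csmallint smallint,
      cint      int,
      cbigint   bigint,
      cboolean1 boolean,
      cboolean2 boolean)
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES ("druid.segment.granularity" = "HOUR");  -- illustrative property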
2018-07-21T05:29:12,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:29:12,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:29:12,037 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:29:12,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:29:12,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:29:12,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:29:12,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:29:12,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:29:12,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:29:12,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:29:12,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:29:12,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:29:12,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:29:12,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
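Predicate pushdown above rewrites the chain from TS[0]-FIL[1]-SEL[2]-FS[3] to TS[0]-FIL[4]-SEL[2]-FS[3] and hands the predicate to the scan, so the ORC reader can filter early. The effect is visible in ordinary EXPLAIN output, where (with pushdown enabled) the predicate also shows up as a filterExpr on the TableScan, along these lines:

    EXPLAIN SELECT cstring1 FROM alltypesorc WHERE ctimestamp1 IS NOT NULL;
    -- TableScan
    --   alias: alltypesorc
    --   filterExpr: ctimestamp1 is not null (type: boolean)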
2018-07-21T05:29:12,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [] accepted = [] method = public org.apache.hadoop.hive.serde2.io.DoubleWritable org.apache.hadoop.hive.ql.udf.UDFRand.evaluate()
2018-07-21T05:29:12,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:29:12,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity], Column[__druid_extra_partition_key]]
2018-07-21T05:29:12,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:29:12,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:29:12,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
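The reduce stage inserted here (SEL_5, RS_6 and SEL_7 between SEL_2 and FS_3) exists purely to cluster rows by hour and spread them over a few writers before the Druid FileSink; RS_6 keys on the two synthetic columns named above, which is also why the bucketing-sorting optimizer bails out rather than touch it. As intuition only, a hypothetical hand-written equivalent of that shuffle would be:

    -- hypothetical rewrite; projected_input stands in for SEL_2's output
    SELECT *
    FROM projected_input
    DISTRIBUTE BY `__time_granularity`, `__druid_extra_partition_key`
    SORT BY `__time_granularity`, `__druid_extra_partition_key`;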
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
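The SEL_5 column list just dumped shows how the two reduce keys are derived: __time_granularity is the row's timestamp floored to the hour, and __druid_extra_partition_key is floor(1.0 / rand()) % 6, a skewed but effective way to split one hour's rows across up to six groups. rand() is also what the "GenericUDFBridge is undeterministic" message refers to: constant propagation must not pre-evaluate it at compile time. Written out as HiveQL expressions (approximate surface syntax for the UDF calls):

    SELECT floor_hour(cast(`__time` as timestamp)) AS `__time_granularity`,
           floor(1.0 / rand()) % 6                 AS `__druid_extra_partition_key`
    FROM projected_input;   -- hypothetical stand-in for SEL_2's output rows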
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:29:12,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:29:12,041 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:29:12,041 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:29:12,041 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:29:12,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.480752ms + 0.009763ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:29:12,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
2018-07-21T05:29:12,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:29:12,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:29:12,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
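Stats this complete (colStatsState: COMPLETE, with distinct counts, null counts, and min/max ranges per column) are available because the test data was analyzed up front; the numbers fetched by get_table_statistics_req above are the ones the standard commands produce and display:

    ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS;
    DESCRIBE FORMATTED alltypesorc cint;   -- shows min/max, distinct count, nulls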
2018-07-21T05:29:12,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6]
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7]
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:29:12,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3] 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0 2018-07-21T05:29:12,054 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:29:12,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately.
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6))
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null})
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key])
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null})
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:29:12,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:29:12,057 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:29:12,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:29:12,058 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:29:12,058 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
2018-07-21T05:29:12,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:29:12,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-29-11_932_7802309835682783064
2018-07-21T05:29:12,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-29-11_932_7802309835682783064
2018-07-21T05:29:12,058 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:29:12,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:29:12,068 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:29:12,068 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:Explain, type:string, comment:null)], properties:null)
2018-07-21T05:29:12,068 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:29:12,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=2, isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=38, flushCache_()=1, getAllDatabases_()=1, getCheckConstraints_(CheckConstraintsRequest, )=1, getUniqueConstraints_(UniqueConstraintsRequest, )=0, getPrimaryKeys_(PrimaryKeysRequest, )=3, getTableColumnStatistics_(String, String, List, )=11, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052911_7a1c6fed-ffbc-4bd1-bd5d-d223901f5cc5); Time taken: 0.136 seconds
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052911_7a1c6fed-ffbc-4bd1-bd5d-d223901f5cc5): EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:29:12,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-5:EXPLAIN] in serial mode
2018-07-21T05:29:12,088 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,088 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY
2018-07-21T05:29:12,092 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7
2018-07-21T05:29:12,093 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:29:12,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {}
2018-07-21T05:29:12,093 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052911_7a1c6fed-ffbc-4bd1-bd5d-d223901f5cc5); Time taken: 0.023 seconds
2018-07-21T05:29:12,093 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:29:12,093 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:29:12,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result file: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_932_7802309835682783064-1/-local-10001
2018-07-21T05:29:12,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-11_932_7802309835682783064-1
2018-07-21T05:29:12,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: file:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-11_932_7802309835682783064-1
2018-07-21T05:29:12,096 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.161 seconds, Fetched: 69 row(s)
2018-07-21T05:29:12,096 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:29:12,096 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:29:12,096 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:29:12,096 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:29:12,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:29:12,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,098 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348): INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:29:12,101 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:29:12,101 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:29:12,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:29:12,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:29:12,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,102 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,112 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,114 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:29:12,114 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:29:12,114 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,114 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,131 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:29:12,133 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,133 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,136 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,136 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,137 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,137 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,138 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:29:12,138 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:29:12,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,142 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:29:12,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,172 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:29:12,172 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:29:12,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:29:12,174 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:29:12,174 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:29:12,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:29:12,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:29:12,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:29:12,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,202 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:29:12,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,213 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:29:12,213 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:29:12,213 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,213 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:29:12,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:29:12,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:29:12,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean2) cboolean2)) 2018-07-21T05:29:12,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null 2018-07-21T05:29:12,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:29:12,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0 2018-07-21T05:29:12,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:29:12,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:12,227 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:12,227 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:12,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:12,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:12,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:12,229 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: 
get_check_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:12,229 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_check_constraints : tbl=hive.default.druid_partitioned_table 2018-07-21T05:29:12,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getCheckConstraints: directsql : SELECT "DBS"."NAME", "TBLS"."TBL_NAME",CASE WHEN "COLUMNS_V2"."COLUMN_NAME" IS NOT NULL THEN "COLUMNS_V2"."COLUMN_NAME" ELSE "PARTITION_KEYS"."PKEY_NAME" END, "KEY_CONSTRAINTS"."CONSTRAINT_NAME", "KEY_CONSTRAINTS"."ENABLE_VALIDATE_RELY", "KEY_CONSTRAINTS"."DEFAULT_VALUE" from "TBLS" INNER JOIN "KEY_CONSTRAINTS" ON "TBLS"."TBL_ID" = "KEY_CONSTRAINTS"."PARENT_TBL_ID" INNER JOIN "DBS" ON "TBLS"."DB_ID" = "DBS"."DB_ID" LEFT OUTER JOIN "COLUMNS_V2" ON "COLUMNS_V2"."CD_ID" = "KEY_CONSTRAINTS"."PARENT_CD_ID" AND "COLUMNS_V2"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" LEFT OUTER JOIN "PARTITION_KEYS" ON "TBLS"."TBL_ID" = "PARTITION_KEYS"."TBL_ID" AND "PARTITION_KEYS"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX" WHERE "KEY_CONSTRAINTS"."CONSTRAINT_TYPE" = 5 AND "DBS"."CTLG_NAME" = ? AND "DBS"."NAME" = ? AND "TBLS"."TBL_NAME" = ? 2018-07-21T05:29:12,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:12,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:12,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:29:12,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:29:12,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-12_099_8653997294804690914-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table 2018-07-21T05:29:12,231 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-12_099_8653997294804690914-1 2018-07-21T05:29:12,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-12_099_8653997294804690914-1/-ext-10000 2018-07-21T05:29:12,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: 
hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:29:12,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null 2018-07-21T05:29:12,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:29:12,233 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan. 2018-07-21T05:29:12,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3] 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator : 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3) 2018-07-21T05:29:12,235 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2) 2018-07-21T05:29:12,235 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1) 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null 2018-07-21T05:29:12,235 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0) 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3] 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,235 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in... 
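NOTE: the get_check_constraints lookup a few records up fetches CHECK constraints for hive.default.druid_partitioned_table over the metastore's direct-SQL path. The statement reads better re-wrapped; the text is unchanged apart from whitespace, the three ? parameters bind the catalog, database, and table name shown in the audit line, and CONSTRAINT_TYPE = 5 is evidently the check-constraint type code, per the method name:

    SELECT "DBS"."NAME", "TBLS"."TBL_NAME",
           CASE WHEN "COLUMNS_V2"."COLUMN_NAME" IS NOT NULL
                THEN "COLUMNS_V2"."COLUMN_NAME"
                ELSE "PARTITION_KEYS"."PKEY_NAME" END,
           "KEY_CONSTRAINTS"."CONSTRAINT_NAME",
           "KEY_CONSTRAINTS"."ENABLE_VALIDATE_RELY",
           "KEY_CONSTRAINTS"."DEFAULT_VALUE"
    from "TBLS"
    INNER JOIN "KEY_CONSTRAINTS" ON "TBLS"."TBL_ID" = "KEY_CONSTRAINTS"."PARENT_TBL_ID"
    INNER JOIN "DBS" ON "TBLS"."DB_ID" = "DBS"."DB_ID"
    LEFT OUTER JOIN "COLUMNS_V2" ON "COLUMNS_V2"."CD_ID" = "KEY_CONSTRAINTS"."PARENT_CD_ID"
      AND "COLUMNS_V2"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX"
    LEFT OUTER JOIN "PARTITION_KEYS" ON "TBLS"."TBL_ID" = "PARTITION_KEYS"."TBL_ID"
      AND "PARTITION_KEYS"."INTEGER_IDX" = "KEY_CONSTRAINTS"."PARENT_INTEGER_IDX"
    WHERE "KEY_CONSTRAINTS"."CONSTRAINT_TYPE" = 5
      AND "DBS"."CTLG_NAME" = ? AND "DBS"."NAME" = ? AND "TBLS"."TBL_NAME" = ?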
2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Method did match: passed = [] accepted = [] method = public org.apache.hadoop.hive.serde2.io.DoubleWritable org.apache.hadoop.hive.ql.udf.UDFRand.evaluate() 2018-07-21T05:29:12,236 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity], Column[__druid_extra_partition_key]] 2018-07-21T05:29:12,237 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]} 2018-07-21T05:29:12,237 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col10=Column[_col10], VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__druid_extra_partition_key=Column[__druid_extra_partition_key], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]} 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,237 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. 
Bailing out of Bucketing Sorting Reduce Sink Optimizer 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3] 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0] 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean) 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0] 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id) 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint:
smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately. 
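NOTE: SEL_5, RS_6 and SEL_7 above are what SortedDynPartitionTimeGranularityOptimizer inserts so that rows reach the Druid file sink clustered by hour segment: the column pruner shows RS_6 keyed on [__time_granularity, __druid_extra_partition_key], and the "New column list" records just below spell out both expressions. A rough hand-written HiveQL rendering of that projection plus shuffle — a sketch of what the operator-tree rewrite computes, not anything the planner emits as SQL — would be:

    -- Sketch only. floor_hour truncates each row's timestamp to its hour
    -- bucket; floor(1.0 / rand()) % 6 derives a pseudo-random extra shard
    -- key in 0..5 (the modulus matches the logged Const int 6, and rand()
    -- is why constant folding logs the expression as "undeterministic"
    -- and leaves it unevaluated).
    SELECT src.*,
           floor_hour(CAST(src.`__time` AS timestamp)) AS `__time_granularity`,
           FLOOR(1.0 / rand()) % 6 AS `__druid_extra_partition_key`
    FROM (...) src   -- the SELECT over alltypesorc being compiled here
    DISTRIBUTE BY `__time_granularity`, `__druid_extra_partition_key`
    SORT BY `__time_granularity`, `__druid_extra_partition_key`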
2018-07-21T05:29:12,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6)) 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}) 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7] 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key]) 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: 
int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504 2018-07-21T05:29:12,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:12,239 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:29:12,239 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:29:12,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.430418ms + 0.011713ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)] 2018-07-21T05:29:12,249 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:29:12,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:29:12,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0] 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 
avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4] 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 
Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2] 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5] 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: 
false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6] 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= 
colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7] 2018-07-21T05:29:12,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: 
tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4] 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2] 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5] 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6] 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7] 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3] 2018-07-21T05:29:12,252 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0 2018-07-21T05:29:12,252 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
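NOTE: the stats annotation above starts from the direct-SQL fetch of column statistics for hive.default.alltypesorc (the TAB_COL_STATS query logged just before the STATS-TS[0] dump); re-wrapped here, with the log's own (...) placeholder for the column list left as-is:

    select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE",
           "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE",
           "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR",
           "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED"
    from "TAB_COL_STATS"
    where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ?
      and "COLUMN_NAME" in (...)

These statistics also explain why every operator from TS[0] down to FS[3] keeps numRows: 12288 — the fetched stats report numNulls: 0 for ctimestamp1, so the pushed-down "ctimestamp1 is not null" predicate eliminates nothing.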
2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: 
tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Function class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge is undeterministic. Don't evaluate immediately. 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))) GenericUDFOPMod(GenericUDFFloor(GenericUDFOPDivide(Const double 1.0, GenericUDFBridge ==> rand ())), Const int 6)) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null},KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] 
to operator SEL[7] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity] Column[KEY.__druid_extra_partition_key]) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null},__druid_extra_partition_key: bigint|{null}) 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6] 2018-07-21T05:29:12,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0] 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6] 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7] 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3] 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7] 2018-07-21T05:29:12,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2 2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7] 2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3] 2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events. 
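NOTE: the GenTezWork records above fix the vertex layout for the job. In the log's own operator notation, the optimized chain TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3] is split at the reduce sink:

    Map 1:     TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]
    Reducer 2: SEL[7]-FS[3]   (fed by RS[6]; parallelism was set to 1 earlier)

Once the edge between the two vertices is recorded, RS[6] is detached from the reducer side, which is the "Removing RS[6] as parent from SEL[7]" step.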
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:29:12,255 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:29:12,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:29:12,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:29:12,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-29-12_099_8653997294804690914
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.QueryPlanPostProcessor: Found org.apache.hadoop.hive.ql.plan.DDLWork - no FileSinkOperation can be present. executionId=hive_2018-07-21_05-29-12_099_8653997294804690914
2018-07-21T05:29:12,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:29:12,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:29:12,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,256 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=4, isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=8, getTable_(String, String, )=51, flushCache_()=0, getAllDatabases_()=2, getCheckConstraints_(CheckConstraintsRequest, )=1, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=11, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348); Time taken: 0.158 seconds
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348): INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_partitioned_table
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1
2018-07-21T05:29:12,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,257 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:DDL] in serial mode
2018-07-21T05:29:12,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:29:12,258 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,258 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:29:12,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,268 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:29:12,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:29:12,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:29:12,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:29:12,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:29:12,269 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.druid_partitioned_table newtbl=druid_partitioned_table
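The "Dumping metastore api call timing information" pair of entries above is produced by a simple client-side accumulator: each metastore call's latency is summed per method name and the whole map is printed at the end of the phase. A rough, self-contained sketch of that bookkeeping pattern (the CallTimings class and its method names are illustrative, not Hive's actual implementation):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CallTimings {
        private final Map<String, Long> totalMs = new LinkedHashMap<>();

        // Record one metastore call, e.g. record("getTable_(String, String, )", 51)
        public void record(String method, long elapsedMs) {
            totalMs.merge(method, elapsedMs, Long::sum);
        }

        public void dump(String phase) {
            System.out.println("Dumping metastore api call timing information for : " + phase + " phase");
            System.out.println("Total time spent in each metastore function (ms): " + totalMs);
        }

        public static void main(String[] args) {
            CallTimings t = new CallTimings();
            t.record("getTable_(String, String, )", 51);
            t.record("getAllDatabases_()", 2);
            t.dump("compilation");
        }
    }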
2018-07-21T05:29:12,270 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.druid_partitioned_table newtbl=druid_partitioned_table
2018-07-21T05:29:12,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:29:12,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:29:12,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@druid_partitioned_table) Type=TABLE WriteType=DDL_NO_LOCK because WriteEntity(default@druid_partitioned_table) Type=TABLE WriteType=INSERT_OVERWRITE is present
2018-07-21T05:29:12,296 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:DDL] in serial mode
2018-07-21T05:29:12,296 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1
2018-07-21T05:29:12,296 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:MAPRED] in serial mode
2018-07-21T05:29:12,313 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user
org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found.
id: hive_test_user: no such user
id: hive_test_user: no such user
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?]
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?]
    at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:29:12,336 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-12_099_8653997294804690914-1
2018-07-21T05:29:12,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-12_099_8653997294804690914-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:29:12,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
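The ShellBasedUnixGroupsMapping warning above is benign in this environment: hive_test_user is a Hive test identity, not an OS account, so the shell lookup Hadoop falls back on fails with "no such user". That lookup amounts to running id against the user name, roughly as in this sketch (illustrative only, not Hadoop's actual implementation):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class ShellGroupsDemo {
        // Resolve a user's Unix groups by shelling out to `id -Gn <user>`,
        // the same mechanism the shell-based groups mapping relies on.
        static List<String> groupsFor(String user) throws Exception {
            Process p = new ProcessBuilder("id", "-Gn", user).start();
            try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
                String line = r.readLine();
                if (p.waitFor() != 0 || line == null) {
                    // e.g. "id: hive_test_user: no such user", as in the warning above
                    return Collections.emptyList();
                }
                return Arrays.asList(line.trim().split("\\s+"));
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(groupsFor("hive_test_user")); // [] on this test host
        }
    }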
2018-07-21T05:29:12,337 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:29:12,337 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:29:12,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,337 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348
2018-07-21T05:29:12,337 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:29:12,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:29:12,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,338 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: INSERT OVERWRITE TABLE druid_partitio...NULL (Stage-2)
2018-07-21T05:29:12,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\n\nINSERT OVERWRITE TABLE druid_partitioned_table\n SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL"}
2018-07-21T05:29:12,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:29:12,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,340 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:29:12,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,341 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 3.62KB
2018-07-21T05:29:12,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table
2018-07-21T05:29:12,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,356 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-12_099_8653997294804690914-1
2018-07-21T05:29:12,358 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:29:12,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,358 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:29:12,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,361 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.58KB
2018-07-21T05:29:12,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:29:12,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,386 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=INSERT OVERWRITE TABLE druid_partitio...NULL (Stage-2), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348 }
2018-07-21T05:29:12,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
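The "Serializing ReduceWork/MapWork using kryo" entries and the "Serialized plan (via RPC) - ... size: ..." sizes above come from Hive handing each vertex's work description to Kryo and measuring the resulting bytes before DAG submission. A minimal sketch of that serialize-measure-deserialize round trip, using a toy HashMap payload rather than Hive's plan objects (assumes Kryo 5.x on the classpath, where class registration is mandatory):

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.util.HashMap;

    public class KryoPlanSizeDemo {
        public static void main(String[] args) {
            Kryo kryo = new Kryo();
            kryo.register(HashMap.class); // stand-in for the real plan classes

            HashMap<String, Integer> plan = new HashMap<>();
            plan.put("Map 1", 1);
            plan.put("Reducer 2", 2);

            // Serialize and report the size, as the "Serialized plan" lines do
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            Output output = new Output(bytes);
            kryo.writeObject(output, plan);
            output.close();
            System.out.printf("Serialized plan size: %.2fKB%n", bytes.size() / 1024.0);

            // Deserialize on the "other side" and verify the round trip
            Input input = new Input(new ByteArrayInputStream(bytes.toByteArray()));
            HashMap<?, ?> copy = kryo.readObject(input, HashMap.class);
            input.close();
            System.out.println("Round-trip OK: " + plan.equals(copy));
        }
    }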
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:12,425 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:12,425 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741948_1124, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_6.recovery
2018-07-21T05:29:12,468 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_6.recovery for DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:29:12,483 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_6, dagName=INSERT OVERWRITE TABLE druid_partitio...NULL (Stage-2)
2018-07-21T05:29:12,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:12,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:13,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:13,049 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001)
2018-07-21T05:29:13,051 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:29:14,067 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:29:14,080 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0001
2018-07-21T05:29:14,183 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:29:14,183 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:29:14,183 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:29:14,183 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:29:14,184 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:29:14,189 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB
2018-07-21T05:29:14,197 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file
2018-07-21T05:29:14,197 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl
2018-07-21T05:29:14,197 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:29:14,197 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
2018-07-21T05:29:14,410 DEBUG [ContainersLauncher #1] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #1, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:29:15,414 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:15,414 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:16,069 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:29:18,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:18,581 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:29:19,235 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:29:19,298 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:29:21,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:29:22,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:22,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0/1
2018-07-21T05:29:23,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:29:23,121 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
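The housekeeper entries "HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)" above are HikariCP's periodic snapshot of the metastore's connection pool. The same four figures can be read programmatically via HikariPoolMXBean; a small sketch against an in-memory Derby database (assumed on the classpath, mirroring the embedded metastore used by this test; the pool may still be filling toward minimumIdle when the snapshot is taken):

    import com.zaxxer.hikari.HikariConfig;
    import com.zaxxer.hikari.HikariDataSource;
    import com.zaxxer.hikari.HikariPoolMXBean;
    import java.sql.Connection;

    public class PoolStatsDemo {
        public static void main(String[] args) throws Exception {
            HikariConfig cfg = new HikariConfig();
            cfg.setJdbcUrl("jdbc:derby:memory:demo;create=true"); // embedded Derby, like the test metastore
            cfg.setMaximumPoolSize(10);
            try (HikariDataSource ds = new HikariDataSource(cfg)) {
                // Hold one connection so "active" is non-zero in the printout
                try (Connection ignored = ds.getConnection()) {
                    HikariPoolMXBean pool = ds.getHikariPoolMXBean();
                    System.out.printf("Pool stats (total=%d, active=%d, idle=%d, waiting=%d)%n",
                            pool.getTotalConnections(), pool.getActiveConnections(),
                            pool.getIdleConnections(), pool.getThreadsAwaitingConnection());
                }
            }
        }
    }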
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:24,380 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:24,381 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741949_1125, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/2fa0747c7cb4461bbf7dbb84fde80094/0_descriptor.json
2018-07-21T05:29:24,438 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/2fa0747c7cb4461bbf7dbb84fde80094/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:24,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:24,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:24,462 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:24,463 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:24,463 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:24,463 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:24,463 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:29:24,463 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:24,463 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:24,464 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741950_1126, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/0_index.zip
2018-07-21T05:29:24,497 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/2fa0747c7cb4461bbf7dbb84fde80094/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:24,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:24,533 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741951_1127, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700.json
2018-07-21T05:29:24,556 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
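Each Druid segment staged above follows the same HDFS write sequence: the NameNode allocates a block ("BLOCK* allocate ..."), the client may force the metadata to disk ("BLOCK* fsync: ..."), and closing the output stream completes the file ("DIR* completeFile: ... is closed by ..."). A minimal client-side sketch that triggers exactly these NameNode state changes (the NameNode URI is taken from the log; the output path is illustrative, not the test's staging directory):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HdfsWriteDemo {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:35925"); // NameNode address from the log
            FileSystem fs = FileSystem.get(conf);
            Path p = new Path("/tmp/demo_descriptor.json");     // illustrative path
            try (FSDataOutputStream out = fs.create(p, true)) { // NameNode allocates a block on first write
                out.writeBytes("{}");
                out.hsync();  // NameNode logs "BLOCK* fsync: ..." for this call
            }                 // close() completes the file: "DIR* completeFile: ... is closed by ..."
        }
    }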
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:29:25,067 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:25,068 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741952_1128, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/dbe108b92d8b47c4ab67c095f97c1ad1/1_descriptor.json
2018-07-21T05:29:25,090 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/dbe108b92d8b47c4ab67c095f97c1ad1/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,096 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,096 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:25,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,097 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741953_1129, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/1_index.zip
2018-07-21T05:29:25,112 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/dbe108b92d8b47c4ab67c095f97c1ad1/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:29:25,121 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:25,121 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741954_1130, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_1.json
2018-07-21T05:29:25,151 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:25,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,270 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741955_1131, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/b0e9b0a157804bfa8acc7eab0543ab47/2_descriptor.json
2018-07-21T05:29:25,285 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/b0e9b0a157804bfa8acc7eab0543ab47/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:29:25,291 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:25,292 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741956_1132, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/2_index.zip
2018-07-21T05:29:25,302 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/b0e9b0a157804bfa8acc7eab0543ab47/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:25,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:25,311 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741957_1133, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_2.json
2018-07-21T05:29:25,321 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:29:25,430 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,430 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741958_1134, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/c5cd2021052f4c50b584237ffa62734f/3_descriptor.json
2018-07-21T05:29:25,442 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/c5cd2021052f4c50b584237ffa62734f/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,446 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,446 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,446 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,446 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,446 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,446 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:25,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:25,447 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741959_1135, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/3_index.zip
2018-07-21T05:29:25,468 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/c5cd2021052f4c50b584237ffa62734f/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:29:25,476 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,477 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741960_1136, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_3.json
2018-07-21T05:29:25,489 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:29:25,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,597 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741961_1137, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/393dbc759c2e49b49129f1717c6896b0/4_descriptor.json
2018-07-21T05:29:25,612 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/393dbc759c2e49b49129f1717c6896b0/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:25,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,627 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741962_1138, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/4_index.zip
2018-07-21T05:29:25,641 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/393dbc759c2e49b49129f1717c6896b0/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:29:25,657 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:25,658 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741963_1139, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_4.json
2018-07-21T05:29:25,690 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:25,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:25,796 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741964_1140, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/9d78cf3ac0a94ea1b284675b8d74c937/5_descriptor.json 2018-07-21T05:29:25,811 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/9d78cf3ac0a94ea1b284675b8d74c937/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:25,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:25,821 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741965_1141, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/5_index.zip 2018-07-21T05:29:25,845 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/9d78cf3ac0a94ea1b284675b8d74c937/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
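Each "BLOCK* allocate blk_..., replicas=..." entry is the NameNode granting the next block of an open file together with its three-node write pipeline, and the paired "DIR* completeFile: ... is closed by DFSClient_attempt_..._r_000000_0_..." records the writing client (the single Tez reducer here) closing that file. The client side of such an entry pair is an ordinary HDFS create/write/close; a self-contained sketch with an illustrative path and an assumed NameNode address:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DescriptorWriter {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Assumed NameNode address for the sketch; the test cluster in
            // this log listens on an ephemeral port (35925).
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020"), conf);
            Path descriptor = new Path("/tmp/segmentsDescriptorDir/segment_0.json");
            // create() triggers "BLOCK* allocate ..." entries as blocks are
            // handed out to the write pipeline.
            try (FSDataOutputStream out = fs.create(descriptor, true)) {
                out.write("{\"dataSource\":\"druid_partitioned_table\"}".getBytes("UTF-8"));
            } // close() produces "DIR* completeFile: ... is closed by DFSClient_..."
        }
    }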
2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:29:25,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:25,868 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741966_1142, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_5.json 2018-07-21T05:29:25,905 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T052912.233-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:29:26,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:26,039 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741967_1143, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/fc121eca7c1d4583b92100f2fd11b65e/0_descriptor.json 2018-07-21T05:29:26,072 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/fc121eca7c1d4583b92100f2fd11b65e/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:26,080 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:26,080 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:26,080 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:29:26,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:26,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:26,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:26,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:29:26,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:29:26,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:26,081 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741968_1144, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/0_index.zip 2018-07-21T05:29:26,149 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1 2018-07-21T05:29:26,512 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/fc121eca7c1d4583b92100f2fd11b65e/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:29:26,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:26,529 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741969_1145, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700.json 2018-07-21T05:29:26,568 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
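The staged paths above follow a consistent layout: per-partition payloads and descriptors under intermediateSegmentDir/<datasource>/..., and flat segment descriptors under segmentsDescriptorDir named by datasource, interval start/end, version timestamp, and partition number. A small helper reconstructing that naming, derived from the log paths themselves rather than from Druid's source:

    import java.nio.file.Path;

    class StagingLayout {
        // e.g. default.druid_partitioned_table_1970-01-01T000000.000Z_
        //      1970-01-01T010000.000Z_2018-07-21T052912.233-0700_2.json
        // Note: in the log, partition 0's descriptor omits the numeric suffix.
        static Path segmentDescriptor(Path stagingDir, String dataSource,
                                      String intervalStart, String intervalEnd,
                                      String version, int partition) {
            String name = String.format("%s_%s_%s_%s_%d.json",
                    dataSource, intervalStart, intervalEnd, version, partition);
            return stagingDir.resolve("segmentsDescriptorDir").resolve(name);
        }

        // e.g. intermediateSegmentDir/default.druid_partitioned_table/
        //      19700101T000000.000Z_19700101T010000.000Z/
        //      2018-07-21T05_29_12.233-07_00/2_index.zip
        static Path intermediateIndex(Path stagingDir, String dataSource,
                                      String intervalDir, String versionDir,
                                      int partition) {
            return stagingDir.resolve("intermediateSegmentDir")
                    .resolve(dataSource)
                    .resolve(intervalDir)
                    .resolve(versionDir)
                    .resolve(partition + "_index.zip");
        }
    }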
2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:29:27,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:27,042 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741970_1146, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/83c9d56de9d74e8abee47bf963af0860/1_index.zip 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
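The progress line "Map 1: 1/1 Reducer 2: 0(+1)/1" a few entries back (and again below, once the reducer finishes) reads as completed(+running)/total tasks per Tez vertex. A toy formatter with hypothetical counters, not Hive's RenderStrategy code:

    class VertexProgress {
        static String format(String vertex, int done, int running, int total) {
            return running > 0
                    ? String.format("%s: %d(+%d)/%d", vertex, done, running, total)
                    : String.format("%s: %d/%d", vertex, done, total);
        }

        public static void main(String[] args) {
            // Prints: Map 1: 1/1 Reducer 2: 0(+1)/1
            System.out.println(format("Map 1", 1, 0, 1) + " "
                    + format("Reducer 2", 0, 1, 1));
        }
    }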
2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:29:27,066 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:27,066 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741971_1147, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/83c9d56de9d74e8abee47bf963af0860/1_descriptor.json 2018-07-21T05:29:27,114 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/83c9d56de9d74e8abee47bf963af0860/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,138 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/83c9d56de9d74e8abee47bf963af0860/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,156 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,156 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,156 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,157 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:27,157 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741972_1148, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_1.json 2018-07-21T05:29:27,175 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:29:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:27,405 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741973_1149, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/fea4f72e07e043d489429a0351597ff3/2_descriptor.json 2018-07-21T05:29:27,417 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/fea4f72e07e043d489429a0351597ff3/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:27,422 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:27,422 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741974_1150, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/2_index.zip 2018-07-21T05:29:27,473 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/fea4f72e07e043d489429a0351597ff3/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:29:27,488 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:27,489 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741975_1151, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_2.json 2018-07-21T05:29:27,501 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:27,637 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:27,637 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741976_1152, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/f0b7534d6cea496c876a675223624f70/3_descriptor.json 2018-07-21T05:29:27,658 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/f0b7534d6cea496c876a675223624f70/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:29:27,664 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:27,664 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741977_1153, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/3_index.zip 2018-07-21T05:29:27,743 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/f0b7534d6cea496c876a675223624f70/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:29:27,779 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:27,779 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741978_1154, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_3.json 2018-07-21T05:29:27,814 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,932 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:29:27,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:27,933 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741979_1155, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/6b8882d30bcf4692871815b28562ea49/4_descriptor.json 2018-07-21T05:29:27,963 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/6b8882d30bcf4692871815b28562ea49/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:29:27,967 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:27,968 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741980_1156, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/4_index.zip 2018-07-21T05:29:28,005 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/6b8882d30bcf4692871815b28562ea49/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:28,027 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:28,028 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741981_1157, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_4.json 2018-07-21T05:29:28,052 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:29:28,118 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:28,118 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741982_1158, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/64d15ab2c2ba47318820c90f71c69dfc/5_descriptor.json 2018-07-21T05:29:28,132 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/64d15ab2c2ba47318820c90f71c69dfc/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:29:28,138 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:28,138 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741983_1159, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/5_index.zip 2018-07-21T05:29:28,156 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/intermediateSegmentDir/default.druid_partitioned_table/64d15ab2c2ba47318820c90f71c69dfc/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:29:28,165 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:28,166 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741984_1160, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_5.json 2018-07-21T05:29:28,182 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348/segmentsDescriptorDir/default.druid_partitioned_table_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T052912.233-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_1419434706_30 2018-07-21T05:29:28,234 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_6.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1 2018-07-21T05:29:28,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:28,242 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1 2018-07-21T05:29:28,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:29:28,245 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode 2018-07-21T05:29:28,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: commit insert into table druid_partitioned_table overwrite true 2018-07-21T05:29:28,349 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Moving [12] Druid segments from staging directory [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348] to Deep storage 
[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage] 2018-07-21T05:29:28,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking for FS supporting hdfs 2018-07-21T05:29:28,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: looking for configuration option fs.hdfs.impl 2018-07-21T05:29:28,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:29:28,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem 2018-07-21T05:29:28,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] retry.RetryUtils: multipleLinearRandomRetry = null 2018-07-21T05:29:28,350 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hdfs.HdfsDataSegmentPusher: Configured HDFS as deep storage 2018-07-21T05:29:28,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User 2018-07-21T05:29:28,351 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage]. 2018-07-21T05:29:28,492 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,492 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
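The fs.FileSystem DEBUG sequence above traces ordinary scheme resolution inside a FileSystem.get() call: first the fs.hdfs.impl configuration key, then the ServiceLoader-registered filesystem implementations, ending at org.apache.hadoop.hdfs.DistributedFileSystem. Equivalent client code (the URI here is illustrative; the test cluster uses an ephemeral port):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class ResolveHdfs {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // "Looking for FS supporting hdfs" -> config key, then service
            // filesystems, as in the DEBUG lines above.
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020/"), conf);
            // Expected with hadoop-hdfs on the classpath:
            // org.apache.hadoop.hdfs.DistributedFileSystem
            System.out.println(fs.getClass().getName());
        }
    }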
2018-07-21T05:29:28,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:28,493 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741985_1161, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json 2018-07-21T05:29:28,540 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:29:28,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:29:28,552 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741986_1162, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json 2018-07-21T05:29:28,569 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
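The deep-storage writes around this point suggest the shape of the commit: for each of the 12 staged segments, the storage handler moves the segment payload into <deep-storage>/<datasource>/<interval>/<version>/ and rewrites descriptor.json there, which is why the same descriptor.json path is allocated and completed repeatedly below, once per partition of the interval. A hedged sketch of one such publish step (the helper, paths, and overwrite behavior are inferred from the log, not taken from DruidStorageHandler):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class SegmentPublisher {
        static void publish(FileSystem fs, Path stagedIndexZip,
                            Path deepStorageDir, byte[] descriptorJson)
                throws IOException {
            fs.mkdirs(deepStorageDir);
            // Move the segment payload into its interval/version directory.
            fs.rename(stagedIndexZip, new Path(deepStorageDir, "index.zip"));
            // (Re)write descriptor.json; overwrite=true mirrors the repeated
            // allocate/completeFile pairs for the same path in the log.
            try (FSDataOutputStream out =
                     fs.create(new Path(deepStorageDir, "descriptor.json"), true)) {
                out.write(descriptorJson);
            }
        }
    }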
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:28,592 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:28,593 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741987_1163, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,605 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:29:28,621 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:28,621 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741988_1164, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,644 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,654 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,654 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:28,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:28,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:29:28,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:28,655 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741989_1165, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,676 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:29:28,687 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:28,687 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741990_1166, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,699 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
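The repeated NetworkTopology chatter traces one small algorithm per block: draw a random datanode from /default-rack, and if it is already in the exclude set (a replica target fixed earlier in the same pipeline), log "is excluded, continuing." and draw again, with the advertised pool of available nodes shrinking each time a target is fixed. Each BLOCK* allocate record then reports the three targets the loop settled on. A self-contained sketch of that retry-on-excluded selection, using the four datanode addresses seen in this log (class and method names are illustrative, not HDFS's internals):

```java
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

public class ChooseRandomSketch {
    // Pick a random non-excluded node from the rack, mimicking the
    // "Node X is excluded, continuing." / "chooseRandom returning Y" records.
    static String chooseRandom(List<String> rackNodes, Set<String> excluded, Random rnd) {
        long available = rackNodes.stream().filter(n -> !excluded.contains(n)).count();
        if (available == 0) {
            System.out.println("No node to choose.");
            return null;
        }
        System.out.println("Choosing random from " + available
                + " available nodes, excludeNodes=" + excluded);
        while (true) {
            String candidate = rackNodes.get(rnd.nextInt(rackNodes.size()));
            if (excluded.contains(candidate)) {
                System.out.println("Node " + candidate + " is excluded, continuing.");
                continue; // redraw, exactly as the DEBUG records show
            }
            System.out.println("chooseRandom returning " + candidate);
            return candidate;
        }
    }

    public static void main(String[] args) {
        List<String> rack = List.of("127.0.0.1:33099", "127.0.0.1:40780",
                "127.0.0.1:52570", "127.0.0.1:45625");
        // First replica already placed on the writer-local node.
        Set<String> excluded = new HashSet<>(Set.of("127.0.0.1:33099"));
        Random rnd = new Random();
        excluded.add(chooseRandom(rack, excluded, rnd)); // second replica
        chooseRandom(rack, excluded, rnd);               // third replica
    }
}
```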
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:29:28,709 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:28,709 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741991_1167, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,719 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:29:28,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:28,730 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741992_1168, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,775 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:29:28,786 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:28,786 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741993_1169, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,797 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,823 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,823 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,824 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:29:28,824 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:28,824 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,824 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,824 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:29:28,824 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:29:28,824 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741994_1170, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,835 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:29:28,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:28,856 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741995_1171, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,926 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
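Each BLOCK* allocate / DIR* completeFile pair above corresponds to one small descriptor.json being written and closed by the same DFS client (DFSClient_NONMAPREDUCE_680435605_1): creating the file makes the NameNode allocate a block plus a three-datanode replica pipeline, and closing the output stream completes the file. A minimal client-side sketch of that lifecycle (class name, shortened path, and NameNode URI are illustrative; the real runs write the deep-storage descriptor paths shown in the records):

```java
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteDescriptor {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:35925/"),
                new Configuration());
        Path descriptor = new Path("/tmp/druid-data/deep-storage/example/descriptor.json");
        // create() is what drives the NameNode's "BLOCK* allocate ... replicas=..." record.
        try (FSDataOutputStream out = fs.create(descriptor, true /* overwrite */)) {
            out.write("{\"example\": true}".getBytes(StandardCharsets.UTF_8));
        } // close() drives the "DIR* completeFile ... is closed by DFSClient_..." record.
        fs.close();
    }
}
```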
2018-07-21T05:29:28,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:29:28,937 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741996_1172, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json
2018-07-21T05:29:28,965 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_partitioned_table/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_29_12.233-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:29:28,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00
2018-07-21T05:29:28,967 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1
2018-07-21T05:29:28,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2
2018-07-21T05:29:28,969 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3
2018-07-21T05:29:28,969 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4
2018-07-21T05:29:28,969 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5
2018-07-21T05:29:28,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: checking load status from coordinator localhost:8081
2018-07-21T05:29:28,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/status] starting
2018-07-21T05:29:28,983 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 2018 12:29:28 GMT Content-Type: application/json Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:28,983 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/status] Got response: 200 OK
2018-07-21T05:29:28,983 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@518d4551
2018-07-21T05:29:28,983 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 408B, last=false
2018-07-21T05:29:28,983 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf
2018-07-21T05:29:28,983 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 0B, last=true
2018-07-21T05:29:28,985 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Waiting for the loading of [12] segments
2018-07-21T05:29:28,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:29:29,005 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:28 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,005 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:29:29,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:29:29,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
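The load check that follows is a plain polling loop over the coordinator's REST API: confirm the coordinator is alive via GET /status, then GET each segment's metadata URL under /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} and treat an empty 204 No Content reply, rendered above as `response is []`, as "not loaded yet". A simplified sketch of that loop (class name, single-segment list, and the 5-second poll interval are assumptions, not the handler's actual values):

```java
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;

public class SegmentLoadPoller {
    // True once the coordinator reports metadata for the segment, i.e. anything
    // other than the empty 204 response logged as "response is []".
    static boolean isLoaded(String coordinator, String dataSource, String segmentId)
            throws IOException {
        URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
                + dataSource + "/segments/" + segmentId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        int code = conn.getResponseCode();
        conn.disconnect();
        return code == 200; // 204 No Content means the segment is still loading
    }

    public static void main(String[] args) throws Exception {
        String coordinator = "http://localhost:8081";
        String dataSource = "default.druid_partitioned_table";
        List<String> segments = List.of(
                "default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4");
        for (String id : segments) {
            while (!isLoaded(coordinator, dataSource, id)) {
                System.out.println("Checking segment [" + id + "] ... not loaded yet");
                Thread.sleep(5_000); // assumed poll interval
            }
        }
    }
}
```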
2018-07-21T05:29:29,010 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,010 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:29:29,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:29:29,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:29:29,014 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,015 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:29:29,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
2018-07-21T05:29:29,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:29:29,017 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,018 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:29:29,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:29:29,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:29:29,022 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,022 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:29:29,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:29:29,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:29:29,026 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,026 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:29:29,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:29:29,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:29:29,039 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,039 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:29:29,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:29:29,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:29:29,048 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,048 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:29:29,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:29:29,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:29:29,052 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,052 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:29:29,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:29:29,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:29:29,056 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,056 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:29:29,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:29:29,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:29:29,061 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,061 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:29:29,063 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:29:29,063 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:29:29,075 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:29,075 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:29:29,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
2018-07-21T05:29:34,460 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:29:34,482 WARN [ContainersLauncher #1] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0001_01_000007 is : 143
2018-07-21T05:29:34,484 DEBUG [ContainersLauncher #1] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #1, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,599 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,601 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,601 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:36,602 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:29:49,236 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:29:49,299 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:29:54,615 INFO [pool-5-thread-1] NameNodeMetricsLog: >> Begin NameNode metrics dump
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":32,...
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:29:54,666 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=65599
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.23465703971119134
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=1
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0001_000001":1}
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=1.0722021660649819
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:SentBytes=51762
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=554
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=1
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=554
2018-07-21T05:29:54,674 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=388.0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=1
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=1
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=8493.0
2018-07-21T05:29:54,677 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,678 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=1278
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=1278
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:FilesTotal=152
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BlocksTotal=84
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsed=299511364
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=84
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityRemaining=155876029396
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=165801947580
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=32 15
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalSyncCount=1021
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=1278
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:29:54,683 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityRemainingGB=145.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:29:54,685 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@3da761cb
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7...
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=66
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=38934660764
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=55985599
2018-07-21T05:29:54,688 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=2
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,693 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=1174021
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.3923686105111591
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=51
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":2}
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=1.0169186465082793
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=2
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:SentBytes=383576
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=2778
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=2778
2018-07-21T05:29:54,697 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,704 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=36
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=239.01775
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=878
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCount=22
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1456.5
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=17
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=464.8285
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=419
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1805
2018-07-21T05:29:54,705 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=3.027027027027027
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=1
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=129.0
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=37
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=1
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=21.0
2018-07-21T05:29:54,725 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":29,...
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:29:54,727 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:29:54,730 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=1
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=6
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=1
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=7
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=6
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:29:54,732 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@5db0411e
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5...
2018-07-21T05:29:54,733 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=66
2018-07-21T05:29:54,734 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=38934627996
2018-07-21T05:29:54,734 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=47599373
2018-07-21T05:29:54,734 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:XceiverCount=1
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":36,...
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:29:54,735 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,738 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=36
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=239.026
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=484
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCount=22
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1456.5
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=17
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=464.8285
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=419
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1805
2018-07-21T05:29:54,743 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:29:54,745 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=114525
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.25
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=4
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=8.0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:SentBytes=1180
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=4
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=4
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=4
2018-07-21T05:29:54,749 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=2
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=2
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=7.0
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=9.0
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,755 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=258
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SyncsNumOps=1020
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.045098039215686274
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.10954616588419405
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.049586776859504134
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesCreated=362
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=121
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetListingOps=28
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TotalFileOps=988
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:AddBlockOps=172
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DeleteFileOps=98
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsNumOps=1278
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=532
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateFileOps=172
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesRenamed=120
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockOpsQueued=3
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=63
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FileInfoOps=325
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockOpsBatched=93
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesDeleted=211
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=21
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetBlockLocations=73
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:29:54,756 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=171781
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.6
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=5
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=55.2
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:SentBytes=1491
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=5
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=5
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=5
2018-07-21T05:29:54,758 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,761 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:29:54,761 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:29:54,761 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3...
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:DfsUsed=98469106
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:Remaining=39068794880
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:29:54,762 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=70
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=858
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=62
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=3.43
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=1
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=200
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=129
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=435.0243902439024
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=19
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=123
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=6467595.43006993
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=670
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195072265
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=1.426356589147287
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=19
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=2988
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=2988
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=96506717
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=19
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=11070.755354752342
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=200
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=123
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=2.915
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=165
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=1753
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=123
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=14372.201939532231
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=195133.55220883535
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=41.68421052631579
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:54,764 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:29:54,766 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:XceiverCount=3
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":28,...
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:29:54,767 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:29:54,769 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentRemaining=46.238174
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentUsed=0.088845335
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:TotalBlocks=84
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=165801947580
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.088845335
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":0,"us...
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"1278"}
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Free=155876029396
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=299511364
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Threads=606
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_A...
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Used=299511364
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hivepte...
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/t...
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5...
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:DfsUsed=47599373
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:Remaining=38934529692
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:29:54,771 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUncache=66
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:29:54,773 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@5023d7c1
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1...
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=62
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=38934521500
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=97457286
2018-07-21T05:29:54,775 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:54,776 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=84
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:29:54,780 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:BlocksTotal=84
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:29:54-0700","windows":[{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],...
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityUsed=299511364
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityRemaining=155876029396
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalSyncTimes=32 15
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FilesTotal=152
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalSyncCount=1021
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:29:54,792 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=1
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=1.0216998191681737
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=29.0
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=553
2018-07-21T05:29:54,796 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=20
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.6136363636363636
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=202
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=1.0
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=32.25
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.06578947368421052
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.40923076923076923
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=325
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=532
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=172
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=1.1506849315068493
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=76
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.6052631578947368
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=44
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=9
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.7454545454545455
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.6666666666666666
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=173
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=28
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=73
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=7
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.1428571428571428
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.5375722543352601
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.4127906976744187
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=1.3430232558139534
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=3
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=172
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=110
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.7142857142857143
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.698019801980198
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=800
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=19
2018-07-21T05:29:54,800 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.69875
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=36
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=239.08789
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=675
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCount=22
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1456.5
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=17
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=467.62674
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=419
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1805
2018-07-21T05:29:54,804 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=3043
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=2
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8888888888888888
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:SentBytes=1519
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=9
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=9
2018-07-21T05:29:54,805 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsWaiting=36
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=239.09183
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=800
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCount=22
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1456.5
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=17
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapUsedM=467.62674
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=419
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillis=1805
2018-07-21T05:29:54,809 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=1016
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=70
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=3.525
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:29:54,812 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=200
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=143
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=405.3550724637681
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=17
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=138
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1.0837944836614173E7
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=275
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95199808
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=2.041958041958042
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=17 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1472 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1472 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=97720107 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=17 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=9456.474864130434 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=200 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:BlocksWritten=138 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=2.965 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=156 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=1777 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=138 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=26386.96510973551 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=164583.1426630435 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=23.705882352941178 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:29:54,813 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,816 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:29:54,816 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:29:54,816 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25 2018-07-21T05:29:54,816 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,816 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcDetailedActivityForPort36372:HeartbeatNumOps=12 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=402 2018-07-21T05:29:54,818 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,821 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:29:54,821 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:29:54,821 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8888888888888888 2018-07-21T05:29:54,821 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,821 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=9 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0 
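The entries above are flat records of the form <record>:<metric>=<value>, each prefixed with its own timestamp by the periodic metrics logger. A minimal, hypothetical Java sketch for splitting one such record into its fields follows; the class name, regex, and sample line are illustrative assumptions, not part of the test harness.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Hypothetical sketch: split one NameNodeMetricsLog record of the form
    //   <timestamp> INFO [<thread>] NameNodeMetricsLog: <record>:<metric>=<value>
    // Record names may themselves contain '.', '-', ',' and '=' (e.g.
    // "QueueMetrics,q0=root,q1=default"), so the record/metric split is
    // anchored on the last ':' whose following token contains no ':' or '='.
    public class MetricsLineParser {
        private static final Pattern LINE = Pattern.compile(
            "^(\\S+) INFO \\[([^\\]]+)\\] NameNodeMetricsLog: (.+):([^:=]+)=(.*)$");

        public static void main(String[] args) {
            String sample = "2018-07-21T05:29:54,813 INFO [pool-5-thread-1] "
                + "NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=97720107";
            Matcher m = LINE.matcher(sample);
            if (m.matches()) {
                System.out.println("timestamp = " + m.group(1)); // 2018-07-21T05:29:54,813
                System.out.println("thread    = " + m.group(2)); // pool-5-thread-1
                System.out.println("record    = " + m.group(3)); // DataNodeActivity-127.0.0.1-52570
                System.out.println("metric    = " + m.group(4)); // BytesWritten
                System.out.println("value     = " + m.group(5)); // 97720107
            }
        }
    }

This split is good enough for the simple values seen in this dump; a value that itself embedded a ":name=" sequence would need a stricter grammar.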
2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:29:54,822 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=36 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=239.11829 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=117 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCount=22 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1456.5 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=17 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=468.5343 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=419 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogFatal=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: 
JvmMetrics-5:GcTimeMillis=1805 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@58a64dee 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3... 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=70 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39068631040 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=98469106 2018-07-21T05:29:54,827 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.port=48537 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] 
NameNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:29:54,828 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcActivityForPort36372:ReceivedBytes=4891 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.port=36372 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3 2018-07-21T05:29:54,829 INFO 
[pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] 
NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:29:54,829 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 
2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:29:54,830 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-... 
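Most rate metrics in these dumps come as NumOps/AvgTime pairs. If one assumes both members of a pair describe the same window (only roughly true in Hadoop's metrics2, where op counts are cumulative while averages may cover just the latest snapshot interval), NumOps * AvgTime approximates total time spent, which allows a quick cross-check between the detailed and aggregate RPC records. A hypothetical sketch using the port-36372 figures above:

    // Hypothetical back-of-the-envelope check, assuming each NumOps/AvgTime
    // pair covers the same window, so NumOps * AvgTime ~= total milliseconds.
    public class RpcTotals {
        public static void main(String[] args) {
            // RpcDetailedActivityForPort36372: HeartbeatNumOps=12, HeartbeatAvgTime=4.25
            double heartbeatMs = 12 * 4.25;
            // RpcActivityForPort36372: RpcProcessingTimeNumOps=12, RpcProcessingTimeAvgTime=4.25
            double allRpcMs = 12 * 4.25;
            // Both come to 51.0 ms, consistent with heartbeats being the only
            // traffic on this port during the window, as one would expect on
            // an otherwise idle test-cluster service port.
            System.out.printf("heartbeats=%.1f ms, all RPCs=%.1f ms%n", heartbeatMs, allRpcMs);
        }
    }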
2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=61793 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=1.4615384615384615 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=6 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=6.717948717948718 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:SentBytes=25965 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=39 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=39 2018-07-21T05:29:54,833 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1... 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:DfsUsed=97457286 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:Remaining=38934365852 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=62 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:29:54,834 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=36 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=239.13423 
2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=792 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCount=22 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1456.5 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=17 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsNew=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=471.3269 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=419 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogError=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogFatal=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1805 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: 
QueueMetrics,q0=root,q1=default:UsedCapacity=0.125 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=6 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=7 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=6 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0 2018-07-21T05:29:54,839 INFO 
[pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0 2018-07-21T05:29:54,839 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=1024 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=66 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=2.5 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=200 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=131 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=334.54330708661416 2018-07-21T05:29:54,841 
INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=25
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=127
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=3031354.9130859375
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=220
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=51490414
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=1.900763358778626
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=25
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=827
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=827
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=55386625
2018-07-21T05:29:54,841 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=25
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=9245.503022974606
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=200
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=127
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=1.925
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=87
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=1124
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=127
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=16859.624555160142
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=242037.93712212818
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=12.28
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:29:54,842 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersKilled=2
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=2
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=76.0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=2
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7...
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:DfsUsed=55985599
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:Remaining=38934308508
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=66
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:GetGroupsNumOps=6
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=15.333333333333334
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:29:54,844 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:29:54,845 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=902
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=66
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=2.08
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=200
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=129
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=370.248
2018-07-21T05:29:54,846 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=31
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=125
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=5214523.457871397
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=12
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2500585
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=1.8992248062015504
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=31
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=97
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=97
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=46477089
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=31
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=11813.762886597939
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=200
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=125
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=1.63
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=78
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=983
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=125
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=10843.397761953205
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=7612.7525773195875
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=3.6451612903225805
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: << End NameNode metrics dump
2018-07-21T05:29:55,284 INFO [pool-12-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"597"}]
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":32,"usedSpace":3858704,"freeSpace":19534233600,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":30,"usedSpace":52126895,"freeSpace":19400017564,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036}}
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=65599
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.23465703971119134
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0001_000001":1}
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=1.0722021660649819
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=51762
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=554
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=554
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=388.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=8493.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:29:55,285 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=1278
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=1278
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=152
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=84
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=299511364
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=84
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=155876029396
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=165801947580
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=32 15
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=1021
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=1278
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=145.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@56b1107d
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=66
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=38934210204
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=55985599
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=2
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=1174021
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.3923686105111591
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=51
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":2}
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=1.0169186465082793
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=2
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=383576
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=2778
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=2778
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=36
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=239.01775
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=878
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=22
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1456.5
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=17
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=464.8285
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=419
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1805
2018-07-21T05:29:55,286 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=3.027027027027027
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=129.0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=37
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=21.0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}]
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":29,"usedSpace":2902918,"freeSpace":19399984796,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":31,"usedSpace":44696455,"freeSpace":19534200832,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=6
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=7
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=6
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:29:55,287 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@5876e4db
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=66
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=38934169244
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=47599373
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=1
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}]
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":36,"usedSpace":53961089,"freeSpace":19534192640,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":32,"usedSpace":44508017,"freeSpace":19534192640,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:55,288 INFO [pool-12-thread-1]
DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=36 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=239.026 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=484 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=22 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,288 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151 2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0 2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=17 
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=464.8285
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=419
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1805
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=114525
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.25
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=4
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=8.0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=1180
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=4
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=4
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=4
2018-07-21T05:29:55,289 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=2
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=2
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=7.0
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=9.0
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,291 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=258
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=1020
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.045098039215686274
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.10954616588419405
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.049586776859504134
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=362
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=121
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=28
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=988
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=172
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=98
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=1278
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=532
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=172
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=120
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=3
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=63
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=325
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=93
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=211
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=21
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=73
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:29:55,292 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=171781
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.6
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=5
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=55.2
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1491
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=5
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=5
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=5
2018-07-21T05:29:55,293 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,294 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:29:55,294 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:29:55,294 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=98469106
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39068794880
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:29:55,295 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=70
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=858
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=62
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=3.43
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=1
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=200
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=129
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=435.0243902439024
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=19
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=123
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=6467595.43006993
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=670
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195072265
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=1.426356589147287
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=19
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=2988
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=2988
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=96506717
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=19
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=11070.755354752342
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=200
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=123
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=2.915
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=165
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=1753
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=123
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=14372.201939532231
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=195133.55220883535
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=41.68421052631579
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:55,296 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:29:55,297 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=3
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}]
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":28,"usedSpace":43844742,"freeSpace":19399939740,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":34,"usedSpace":53612544,"freeSpace":19534155776,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:29:55,298 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,299 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.238174
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.088845335
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=84
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=165801947580
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.088845335
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"usedSpace":97457286,"adminState":"In Service","nonDfsUsedSpace":41427659642,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":97457286,"remaining":38935701148,"blockScheduled":1,"blockPoolUsed":97457286,"blockPoolUsedPercent":0.11563669,"volfails":0,"lastBlockReport":9},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":1,"usedSpace":47599373,"adminState":"In Service","nonDfsUsedSpace":41477517555,"capacity":84278861824,"numBlocks":59,"version":"3.1.0","used":47599373,"remaining":38935701148,"blockScheduled":1,"blockPoolUsed":47599373,"blockPoolUsedPercent":0.056478422,"volfails":0,"lastBlockReport":9},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":0,"usedSpace":55985599,"adminState":"In Service","nonDfsUsedSpace":41470122561,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":55985599,"remaining":38934709916,"blockScheduled":1,"blockPoolUsed":55985599,"blockPoolUsedPercent":0.066429,"volfails":0,"lastBlockReport":9},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":1,"usedSpace":98469106,"adminState":"In Service","nonDfsUsedSpace":41426647822,"capacity":84278861824,"numBlocks":68,"version":"3.1.0","used":98469106,"remaining":39069917184,"blockScheduled":0,"blockPoolUsed":98469106,"blockPoolUsedPercent":0.11683724,"volfails":0,"lastBlockReport":9}}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"1278"}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=155876029396
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=299511364
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=606
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=299511364
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646}
2018-07-21T05:29:55,301 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}]
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=47599373
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=38934529692
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:29:55,302 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=66
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:29:55,303 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@5c497b0d
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=62
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=38934054556
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=97457286
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,304 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=84
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:29:55,305 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,306 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=84 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:29:55-0700","windows":[{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":24}],"totalCount":24},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"*","topUsers":[{"user":"hiveptest","count":225}],"totalCount":225},{"opType":"delete","topUsers":[{"user":"hiveptest","count":29}],"totalCount":29},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"rename","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"create","topUsers":[{"user":"hiveptest","count":49}],"totalCount":49},{"opType":"open","topUsers":[{"user":"hiveptest","count":15}],"totalCount":15}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8},{"opType":"rename 
(options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":10}],"totalCount":10},{"opType":"*","topUsers":[{"user":"hiveptest","count":509}],"totalCount":509},{"opType":"delete","topUsers":[{"user":"hiveptest","count":68}],"totalCount":68},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":117}],"totalCount":117},{"opType":"rename","topUsers":[{"user":"hiveptest","count":27}],"totalCount":27},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":97}],"totalCount":97},{"opType":"create","topUsers":[{"user":"hiveptest","count":100}],"totalCount":100},{"opType":"open","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":9}],"totalCount":9},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":76}],"totalCount":76},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":28}],"totalCount":28},{"opType":"*","topUsers":[{"user":"hiveptest","count":1043}],"totalCount":1043},{"opType":"delete","topUsers":[{"user":"hiveptest","count":110}],"totalCount":110},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":325}],"totalCount":325},{"opType":"rename","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":202}],"totalCount":202},{"opType":"create","topUsers":[{"user":"hiveptest","count":172}],"totalCount":172},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"open","topUsers":[{"user":"hiveptest","count":73}],"totalCount":73}],"windowLenMs":1500000}]} 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=299511364 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=155876029396 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 
2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=32 15 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=152 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=1021 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:29:55,308 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=1 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=1.0216998191681737 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=29.0 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=553 2018-07-21T05:29:55,309 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=20 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.6136363636363636 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=202 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=1.0 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=32.25 2018-07-21T05:29:55,310 INFO 
[pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.06578947368421052 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.40923076923076923 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=325 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=532 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=172 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=1.1506849315068493 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=76 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.6052631578947368 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=44 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=9 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.7454545454545455 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.6666666666666666 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=173 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=28 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=73 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=7 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.1428571428571428 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.5375722543352601 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.4127906976744187 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=1.3430232558139534 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=3 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=172 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=110 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.7142857142857143 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.698019801980198 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=800 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=19 2018-07-21T05:29:55,310 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.69875 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=36 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=239.08789 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=675 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=22 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=17 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:29:55,311 INFO 
[pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=467.62674 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=419 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1805 2018-07-21T05:29:55,311 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=3043 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=2 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8888888888888888 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=1519 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=9 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=9 2018-07-21T05:29:55,314 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: 
JvmMetrics:MemMaxM=1820.5 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=36 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=239.09183 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=800 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=22 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=17 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=467.62674 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=419 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1805 2018-07-21T05:29:55,315 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=1016 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=70 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=3.525 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=200 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=143 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=405.3550724637681 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=17 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=138 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1.0837944836614173E7 2018-07-21T05:29:55,316 INFO 
[pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=275 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95199808 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=2.041958041958042 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=17 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:55,316 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1472 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1472 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=97720107 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=17 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 
2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=9456.474864130434 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=200 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=138 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=2.965 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=156 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=1777 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=138 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=26386.96510973551 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=164583.1426630435 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=23.705882352941178 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:55,317 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,318 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=402 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8888888888888888 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=9 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=36 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=239.11829 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=117 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=22 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:29:55,322 INFO 
[pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=17
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=468.5343
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=419
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1805
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:29:55,322 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@194beee0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=70
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39068164096
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=98469106
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176095525,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176094507,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}]
2018-07-21T05:29:55,323 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=61793
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=1.4615384615384615
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=6
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=6.717948717948718
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=25965
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=39
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=39
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=97457286
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=38934365852
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=62
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=36
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=251.46094
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=239.13423
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=792
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=22
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1456.5
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=17
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=471.3269
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=419
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1805
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=6
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=7
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=6
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=1024
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=66
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=2.5
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=200
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=131
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=334.54330708661416
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=25
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=127
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=3031354.9130859375
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=220
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=51490414
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=1.900763358778626
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=25
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=827
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=827
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:55,324 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=55386625
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=25
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=9245.503022974606
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=200
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=127
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=1.925
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=87
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=1124
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=127
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=16859.624555160142
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=242037.93712212818
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=12.28
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=2
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=2
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=76.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=2
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=55985599
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=38934308508
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=66
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=6
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=15.333333333333334
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=902
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=66
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=2.08
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=200
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=129
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=370.248
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=31
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=125
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=5214523.457871397
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=12
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2500585
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=1.8992248062015504
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=31
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=97
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=97
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=46477089
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=31
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=11813.762886597939
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=200
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=125
2018-07-21T05:29:55,325 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=1.63
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=78
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=983
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=125
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=10843.397761953205
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:29:55,326
INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0 2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=7612.7525773195875 2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=3.6451612903225805 2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0 2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: << End DataNode metrics dump 2018-07-21T05:29:55,652 INFO [pool-19-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}] 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":32,"usedSpace":3858704,"freeSpace":19534008320,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":30,"usedSpace":52126895,"freeSpace":19399792284,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036}} 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=65599 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] 
DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.23465703971119134 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0001_000001":1} 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=1.0722021660649819 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=51762 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=554 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=554 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=388.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0 
2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=8493.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1 2018-07-21T05:29:55,653 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=1278 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=1278 
2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=152 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=84 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=299511364 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=84 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=155876029396 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=165801947580 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: 
FSNamesystem:MissingBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=32 15 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=1021 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=1278 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=145.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: 
FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@346c0bc2 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=66 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=38933759644 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=55985599 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=2 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=1174021 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.3923686105111591 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=51 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] 
DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":2} 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=1.0169186465082793 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=2 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=383576 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=2778 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=2778 2018-07-21T05:29:55,654 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=36 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=239.01775 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=878 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=22 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151 
2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=17 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=464.8285 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=419 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1805 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=3.027027027027027 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=1 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=129.0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=37 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=1 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=21.0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}] 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":29,"usedSpace":2902918,"freeSpace":19399763612,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":31,"usedSpace":44696455,"freeSpace":19533979648,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0 2018-07-21T05:29:55,655 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125 2018-07-21T05:29:55,656 
INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=6 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=7 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=6 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] 
DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@3aabe33f 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=66 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=38933726876 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=47599373 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}] 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":36,"usedSpace":53961089,"freeSpace":19533967360,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":32,"usedSpace":44508017,"freeSpace":19533967360,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem 2018-07-21T05:29:55,656 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=36 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=251.39844 
2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=239.026 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=484 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=22 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=17 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=464.8285 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=419 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1805 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=114525 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.25 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort44235:RpcAuthorizationSuccesses=4 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=8.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=1180 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=4 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=4 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=4 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=2 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=2 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=7.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=9.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=258 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=1020 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.045098039215686274 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0 
2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.10954616588419405 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.049586776859504134 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=362 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=121 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=28 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=988 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=172 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=98 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=1278 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=532 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=172 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=120 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=3 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=63 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=325 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=93 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=211 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=21 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=73 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=171781 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.6 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=55.2 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107 2018-07-21T05:29:55,657 INFO 
[pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1491 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=5 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=98469106 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39068794880 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=70 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=858 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=62 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=3.43 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=1 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=200 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:55,657 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=129 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=435.0243902439024 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=19 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=123 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=6467595.43006993 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=670 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195072265 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=1.426356589147287 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=19 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=2988 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=2988 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=96506717 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=19 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0 2018-07-21T05:29:55,658 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=11070.755354752342 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=200 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=123 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=2.915 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=165 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=1753 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=123 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=14372.201939532231 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=195133.55220883535 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=41.68421052631579 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NameNodeStatus:HostAndPort=localhost:35925 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=3 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}] 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":28,"usedSpace":43844742,"freeSpace":19399714460,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":34,"usedSpace":53612544,"freeSpace":19533930496,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,658 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.238174 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.088845335 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=84 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=165801947580 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode= 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.088845335 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"usedSpace":97457286,"adminState":"In Service","nonDfsUsedSpace":41427659642,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":97457286,"remaining":38935701148,"blockScheduled":1,"blockPoolUsed":97457286,"blockPoolUsedPercent":0.11563669,"volfails":0,"lastBlockReport":9},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":1,"usedSpace":47599373,"adminState":"In Service","nonDfsUsedSpace":41477517555,"capacity":84278861824,"numBlocks":59,"version":"3.1.0","used":47599373,"remaining":38935701148,"blockScheduled":1,"blockPoolUsed":47599373,"blockPoolUsedPercent":0.056478422,"volfails":0,"lastBlockReport":9},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":1,"usedSpace":55985599,"adminState":"In Service","nonDfsUsedSpace":41470122561,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":55985599,"remaining":38934709916,"blockScheduled":1,"blockPoolUsed":55985599,"blockPoolUsedPercent":0.066429,"volfails":0,"lastBlockReport":9},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":1,"usedSpace":98469106,"adminState":"In Service","nonDfsUsedSpace":41426647822,"capacity":84278861824,"numBlocks":68,"version":"3.1.0","used":98469106,"remaining":39069917184,"blockScheduled":0,"blockPoolUsed":98469106,"blockPoolUsedPercent":0.11683724,"volfails":0,"lastBlockReport":9}} 2018-07-21T05:29:55,660 INFO 
[pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"1278"} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=155876029396 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=299511364 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=606 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=299511364 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}] 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 
2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=47599373 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=38934529692 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=66 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] 
DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@3e9460b2 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=62 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=38933612188 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=97457286 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 
2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:29:55,660 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=84 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,661 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=84 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:29:55-0700","windows":[{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename 
(options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":24}],"totalCount":24},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"*","topUsers":[{"user":"hiveptest","count":225}],"totalCount":225},{"opType":"delete","topUsers":[{"user":"hiveptest","count":29}],"totalCount":29},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"rename","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"create","topUsers":[{"user":"hiveptest","count":49}],"totalCount":49},{"opType":"open","topUsers":[{"user":"hiveptest","count":15}],"totalCount":15}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":10}],"totalCount":10},{"opType":"*","topUsers":[{"user":"hiveptest","count":509}],"totalCount":509},{"opType":"delete","topUsers":[{"user":"hiveptest","count":68}],"totalCount":68},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":117}],"totalCount":117},{"opType":"rename","topUsers":[{"user":"hiveptest","count":27}],"totalCount":27},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":97}],"totalCount":97},{"opType":"create","topUsers":[{"user":"hiveptest","count":100}],"totalCount":100},{"opType":"open","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":9}],"totalCount":9},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":76}],"totalCount":76},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":28}],"totalCount":28},{"opType":"*","topUsers":[{"user":"hiveptest","count":1043}],"totalCount":1043},{"opType":"delete","topUsers":[{"user":"hiveptest","count":110}],"totalCount":110},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":325}],"totalCount":325},{"opType":"rename","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":202}],"totalCount":202},{"opType":"create","topUsers":[{"user":"hiveptest","count":172}],"totalCount":172},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"open","topUsers":[{"user":"hiveptest","count":73}],"totalCount":73}],"windowLenMs":1500000}]} 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=299511364 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=155876029396 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:29:55,662 INFO 
[pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=32 15 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=152 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=1021 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=1 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=1.0216998191681737 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=29.0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=553 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=20 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.6136363636363636 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=202 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=1.0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=32.25 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.06578947368421052 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.40923076923076923 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=325 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=532 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=172 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=1.1506849315068493 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=76 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.6052631578947368 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=44 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=9 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.7454545454545455 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.6666666666666666 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=173 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort35925:GetListingNumOps=28 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=73 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=7 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.1428571428571428 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.5375722543352601 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.4127906976744187 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=1.3430232558139534 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=3 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=172 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=110 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.7142857142857143 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.698019801980198 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=800 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=19 2018-07-21T05:29:55,662 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.69875 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=36 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=239.08789 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=675 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] 
DataNodeMetricsLog: JvmMetrics-4:GcCount=22 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=17 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=467.62674 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=419 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1805 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=3043 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=2 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8888888888888888 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=1519 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=9 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] 
DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=9 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=36 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=239.09183 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=800 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=22 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=17 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=467.62674 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=419 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 
2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1805 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=1016 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=70 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=3.525 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=200 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=143 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=405.3550724637681 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:29:55,663 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=17 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=138 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1.0837944836614173E7 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=275 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95199808 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=2.041958041958042 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=17 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1472 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1472 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=97720107 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=17 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=9456.474864130434 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=200 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=138 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=2.965 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=156 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=1777 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=138 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=26386.96510973551 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=164583.1426630435 
2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=23.705882352941178 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,663 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 
2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=402 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8888888888888888 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=9 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 
2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=36 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=239.11829 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=117 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 
2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=22 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1456.5 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=17 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=468.5343 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=419 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1805 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@7b96e904 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: 
FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=70 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39067713536 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=98469106 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 
2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,664 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12 2018-07-21T05:29:55,665 INFO 
[pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NodeManagerMetrics-1:ContainersCompleted=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176095525,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176094507,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}] 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=61793 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=1.4615384615384615 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=6 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=6.717948717948718 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=25965 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=39 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=39
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=97457286
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=38934365852
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:29:55,665 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=62
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=36
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=251.46094
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=239.13423
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=792
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=22
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1456.5
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=17
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=471.3269
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=419
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1805
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=6
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=7
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=6
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=1024
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=66
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=2.5
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=200
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=131
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=334.54330708661416
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=25
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=127
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=3031354.9130859375
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=220
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=51490414
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=1.900763358778626
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=25
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=827
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=827
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=55386625
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=25
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=9245.503022974606
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=200
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=127
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=1.925
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=87
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=1124
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=127
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=16859.624555160142
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=242037.93712212818
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=12.28
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=2
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=2
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=76.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=2
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,666 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=55985599
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=38934308508
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=66
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=6
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=15.333333333333334
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=902
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=66
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=2.08
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=200
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=129
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=370.248
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=31
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=125
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=5214523.457871397
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=12
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2500585
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=1.8992248062015504
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=31
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=97
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=97
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=46477089
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=31
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=11813.762886597939
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=200
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=125
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=1.63
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=78
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=983
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=125
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=10843.397761953205
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=7612.7525773195875
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=3.6451612903225805
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:29:56,346 INFO [pool-33-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"598"}]
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":32,"usedSpace":3858704,"freeSpace":19534155776,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":30,"usedSpace":52126895,"freeSpace":19399939740,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036}}
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=65599
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.23465703971119134
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,347 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0001_000001":1}
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=1.0722021660649819
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=51762
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=554
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=554
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=388.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=8493.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=1278
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=1278
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=152
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=84
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=299511364
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=84
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=155876029396
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=165801947580
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=32 15
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=1021
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=1278
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=145.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@1c613542 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=66 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=38934054556 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=55985599 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=2 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 
2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=1174021 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.3923686105111591 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=51 2018-07-21T05:29:56,348 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":2} 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=1.0169186465082793 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=2 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=383576 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=2778 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=2778 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=36 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=239.01775 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: 
JvmMetrics-1:MemNonHeapMaxM=-1.0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=878 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=22 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1456.5 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=17 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=464.8285 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=419 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1805 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=3.027027027027027 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=129.0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=37 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=21.0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeInfo-2:XceiverCount=3 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"599"}] 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":29,"usedSpace":2902918,"freeSpace":19399911068,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":31,"usedSpace":44696455,"freeSpace":19534127104,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=0 2018-07-21T05:29:56,349 INFO 
[pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=6 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=7 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=6 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: 
QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0 2018-07-21T05:29:56,349 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@12a6c8be 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=66 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=38934021788 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=47599373 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=1 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"599"}] 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":36,"usedSpace":53961089,"freeSpace":19534114816,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":32,"usedSpace":44508017,"freeSpace":19534114816,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=36 
2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=239.026 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=484 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=22 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1456.5 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=17 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=464.8285 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=419 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1805 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=114525 2018-07-21T05:29:56,350 INFO 
[pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.25 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=4 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=8.0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=1180 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=4 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=4 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=4 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=2 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=2 2018-07-21T05:29:56,350 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=7.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=9.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] 
DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=258 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=1020 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.045098039215686274 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.10954616588419405 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.049586776859504134 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=362 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=121 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=28 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=988 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=172 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=98 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=1278 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=532 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128 2018-07-21T05:29:56,351 INFO 
[pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=172 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=120 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=3 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=63 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=325 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=93 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=211 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=21 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=73 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=171781 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.6 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=5 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=55.2 2018-07-21T05:29:56,351 INFO 
[pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1491 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=5 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=5 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=5 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=98469106 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39068794880 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=70 
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=858 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=62 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=3.43 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=1 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=200 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=129 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=435.0243902439024 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=19 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=123 2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: 
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=6467595.43006993
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=670
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195072265
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=1.426356589147287
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=19
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=2988
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=2988
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=96506717
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=19
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=11070.755354752342
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=200
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=123
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=2.915
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=165
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=1753
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=123
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=14372.201939532231
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=195133.55220883535
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=41.68421052631579
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:29:56,351 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:29:56,352 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:29:56,352 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:29:56,352 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=3
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"599"}]
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":28,"usedSpace":43844742,"freeSpace":19399861916,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":34,"usedSpace":53612544,"freeSpace":19534077952,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,357 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.238174
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.088845335
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=84
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=165801947580
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.088845335
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":2,"usedSpace":97457286,"adminState":"In Service","nonDfsUsedSpace":41427659642,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":97457286,"remaining":38935701148,"blockScheduled":1,"blockPoolUsed":97457286,"blockPoolUsedPercent":0.11563669,"volfails":0,"lastBlockReport":9},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":2,"usedSpace":47599373,"adminState":"In Service","nonDfsUsedSpace":41477517555,"capacity":84278861824,"numBlocks":59,"version":"3.1.0","used":47599373,"remaining":38935701148,"blockScheduled":1,"blockPoolUsed":47599373,"blockPoolUsedPercent":0.056478422,"volfails":0,"lastBlockReport":9},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":1,"usedSpace":55985599,"adminState":"In Service","nonDfsUsedSpace":41470122561,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":55985599,"remaining":38934709916,"blockScheduled":1,"blockPoolUsed":55985599,"blockPoolUsedPercent":0.066429,"volfails":0,"lastBlockReport":9},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":2,"usedSpace":98469106,"adminState":"In Service","nonDfsUsedSpace":41426647822,"capacity":84278861824,"numBlocks":68,"version":"3.1.0","used":98469106,"remaining":39069917184,"blockScheduled":0,"blockPoolUsed":98469106,"blockPoolUsedPercent":0.11683724,"volfails":0,"lastBlockReport":9}}
Service","nonDfsUsedSpace":41470122561,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":55985599,"remaining":38934709916,"blockScheduled":1,"blockPoolUsed":55985599,"blockPoolUsedPercent":0.066429,"volfails":0,"lastBlockReport":9},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":2,"usedSpace":98469106,"adminState":"In Service","nonDfsUsedSpace":41426647822,"capacity":84278861824,"numBlocks":68,"version":"3.1.0","used":98469106,"remaining":39069917184,"blockScheduled":0,"blockPoolUsed":98469106,"blockPoolUsedPercent":0.11683724,"volfails":0,"lastBlockReport":9}} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"1278"} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=155876029396 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=299511364 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=606 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=299511364 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646} 2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: 
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=47599373
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=38934529692
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=66
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,358 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@6e6901d2
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=62
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=38933907100
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=97457286
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=84
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:29:56,359 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=84
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:29:56-0700","windows":[{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":24}],"totalCount":24},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"*","topUsers":[{"user":"hiveptest","count":225}],"totalCount":225},{"opType":"delete","topUsers":[{"user":"hiveptest","count":29}],"totalCount":29},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"rename","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"create","topUsers":[{"user":"hiveptest","count":49}],"totalCount":49},{"opType":"open","topUsers":[{"user":"hiveptest","count":15}],"totalCount":15}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":10}],"totalCount":10},{"opType":"*","topUsers":[{"user":"hiveptest","count":509}],"totalCount":509},{"opType":"delete","topUsers":[{"user":"hiveptest","count":68}],"totalCount":68},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":117}],"totalCount":117},{"opType":"rename","topUsers":[{"user":"hiveptest","count":27}],"totalCount":27},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":97}],"totalCount":97},{"opType":"create","topUsers":[{"user":"hiveptest","count":100}],"totalCount":100},{"opType":"open","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":9}],"totalCount":9},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":76}],"totalCount":76},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":28}],"totalCount":28},{"opType":"*","topUsers":[{"user":"hiveptest","count":1043}],"totalCount":1043},{"opType":"delete","topUsers":[{"user":"hiveptest","count":110}],"totalCount":110},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":325}],"totalCount":325},{"opType":"rename","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":202}],"totalCount":202},{"opType":"create","topUsers":[{"user":"hiveptest","count":172}],"totalCount":172},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"open","topUsers":[{"user":"hiveptest","count":73}],"totalCount":73}],"windowLenMs":1500000}]}
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=299511364
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=155876029396
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=32 15
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=152
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=1021
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=1
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=1.0216998191681737
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=29.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=553
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=20
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.6136363636363636
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=202
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=1.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=32.25
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.06578947368421052
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.40923076923076923
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=325
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=532
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=172
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=1.1506849315068493
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=76
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.6052631578947368
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=44
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=9
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.7454545454545455
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.6666666666666666
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=173
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=28
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=73
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=7
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.1428571428571428
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.5375722543352601
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.4127906976744187
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=1.3430232558139534
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=3
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=172
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=110
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.7142857142857143
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.698019801980198
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=800
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=19
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.69875
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=36
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=239.08789
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=675
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=22
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1456.5
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=17
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=467.62674
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=419
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1805
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=3043
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=2
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8888888888888888
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=1519
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=9
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:29:56,360 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=9
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=36
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=239.09183
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=800
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=22
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1456.5
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=17
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=467.62674
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=419
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1805
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=1016
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=70
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=3.525
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=200
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=143
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
[pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=405.3550724637681 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=17 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=138 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1.0837944836614173E7 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=275 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95199808 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=2.041958041958042 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=17 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1472 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1472 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 
2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=97720107 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=17 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=9456.474864130434 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=200 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=138 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=2.965 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=156 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=1777 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=138 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=26386.96510973551 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=164583.1426630435 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=23.705882352941178 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=402 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: 
RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8888888888888888 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=9 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=36 2018-07-21T05:29:56,361 INFO 
[pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=239.11829 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=117 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=22 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1456.5 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=17 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=468.5343 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=419 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1805 2018-07-21T05:29:56,361 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@514f05db 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: 
FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=70 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39068016640 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=98469106 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,366 
INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] 
DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12 2018-07-21T05:29:56,366 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0 2018-07-21T05:29:56,367 INFO 
[pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: 
RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176095525,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176094507,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}] 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=61793 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=1.4615384615384615 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=6 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=6.717948717948718 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 
2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=25965 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=39 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=39 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:29:56,367 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] 
DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=97457286 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=38934365852 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=62 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=36 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=239.13423 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=792 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=22 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1456.5 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:29:56,368 
INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=17
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=471.3269
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=419
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1805
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=6
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=7
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=6
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=1024
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=66
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=2.5
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=200
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=131
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=334.54330708661416
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=25
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=127
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=3031354.9130859375
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=220
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=51490414
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=1.900763358778626
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=25
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=827
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=827
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=55386625
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=25
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=9245.503022974606
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=200
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=127
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=1.925
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=87
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=1124
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=127
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=16859.624555160142
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=242037.93712212818
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=12.28
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:29:56,368 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=2
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=2
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=76.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=2
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=55985599
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=38934308508
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=66
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=6
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=15.333333333333334
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=902
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=66
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=2.08
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=200
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=129
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=370.248
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=31
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=125
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=5214523.457871397
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=12
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2500585
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=1.8992248062015504
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=31
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=97
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=97
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=46477089
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=31
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=11813.762886597939
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=200
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=125
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=1.63
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=78
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:29:56,369 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=983
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=125
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=10843.397761953205
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=7612.7525773195875
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=3.6451612903225805
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:29:56,916 INFO [pool-46-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"599"}]
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":32,"usedSpace":3858704,"freeSpace":19533930496,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":30,"usedSpace":52126895,"freeSpace":19399714460,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036}}
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=65599
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.23465703971119134
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0001_000001":1}
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=1.0722021660649819
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=51762
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=554
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=554
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=388.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=8493.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:29:56,917 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=1278
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=1278
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=152
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=84
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=299511364
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=84
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=155876029396
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=165801947580
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=32 15
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=1021
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=1278
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=145.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@337b2aff
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=66
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=38933612188
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=55985599
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=2
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=1174021
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.3923686105111591
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=51
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":2}
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=1.0169186465082793
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=2
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=383576
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=2778
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:29:56,918 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=2778
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=36
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=239.01775
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=878
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=22
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1456.5
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=17
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=464.8285
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=419
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1805
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=3.027027027027027
2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=1
2018-07-21T05:29:56,919 INFO
[pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=129.0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=37 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=21.0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"600"}] 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":29,"usedSpace":2902918,"freeSpace":19399685788,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":31,"usedSpace":44696455,"freeSpace":19533901824,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=6 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=7 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=6 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] 
DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0 2018-07-21T05:29:56,919 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@3dbcd2e0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=66 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=38933571228 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=47599373 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=1 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"600"}] 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":36,"usedSpace":53961089,"freeSpace":19533893632,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":32,"usedSpace":44508017,"freeSpace":19533893632,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0 
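[editor's note] The DataNodeInfo-1:VolumeInfo entry above is a flat JSON map of volume path to counters. A toy extraction in plain Java is shown below; a real consumer would use a JSON parser, the class name is illustrative, and the sample string abbreviates the long volume paths to data3/data4. The two usedSpace values sum to 98469106, which matches the FSDatasetState:DfsUsed reported later for the datanode that owns data3/data4.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Toy extraction of per-volume usedSpace from a DataNodeInfo:VolumeInfo line.
public class VolumeInfoSum {
    public static void main(String[] args) {
        // Abbreviated from the DataNodeInfo-1:VolumeInfo entry above.
        String volumeInfo = "{\"data3\":{\"numBlocks\":36,\"usedSpace\":53961089,"
            + "\"freeSpace\":19533893632},\"data4\":{\"numBlocks\":32,"
            + "\"usedSpace\":44508017,\"freeSpace\":19533893632}}";

        Pattern p = Pattern.compile("\"usedSpace\":(\\d+)");
        Matcher m = p.matcher(volumeInfo);
        long totalUsed = 0;
        while (m.find()) {
            totalUsed += Long.parseLong(m.group(1));
        }
        // 53961089 + 44508017 = 98469106
        System.out.println("usedSpace across volumes: " + totalUsed);
    }
}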
2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=36 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=251.39844 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=239.026 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=484 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=22 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1456.5 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=17 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=464.8285 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=419 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: 
JvmMetrics-2:GcTimeMillis=1805 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=114525 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.25 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=4 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=8.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=1180 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=4 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=4 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=4 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=2 
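[editor's note] The RpcActivityForPort44235 entries above come in NumOps/AvgTime pairs: a call count plus a mean latency in milliseconds, as Hadoop's rate metrics report them. Multiplying the two gives only a rough cumulative time, since the average typically reflects the most recent reporting interval rather than the whole run; the sketch below (illustrative class name, values copied from this snapshot) makes that estimate explicit.

// Approximate cumulative RPC time from the NumOps/AvgTime pairs for port 44235.
public class RpcTimeEstimate {
    public static void main(String[] args) {
        long   processingNumOps = 4;    // RpcProcessingTimeNumOps
        double processingAvgMs  = 8.0;  // RpcProcessingTimeAvgTime
        long   queueNumOps      = 4;    // RpcQueueTimeNumOps
        double queueAvgMs       = 0.25; // RpcQueueTimeAvgTime

        // Rough estimate only: the mean covers the latest reporting interval.
        System.out.printf("~processing total: %.1f ms%n", processingNumOps * processingAvgMs);
        System.out.printf("~queue total:      %.2f ms%n", queueNumOps * queueAvgMs);
    }
}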
2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=2 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=7.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=9.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=258 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=1020 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.045098039215686274 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.10954616588419405 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.049586776859504134 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=362 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=121 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=28 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=988 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=172 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0 2018-07-21T05:29:56,920 INFO 
[pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=98 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=1278 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=532 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=172 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=120 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=3 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=63 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=325 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=93 2018-07-21T05:29:56,920 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=211 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=21 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=73 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=171781 
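[editor's note] NameNodeActivity:TotalFileOps=988 in the block above is consistent with the sum of the individual file-operation counters logged alongside it. The sketch below reproduces the arithmetic with the values from this snapshot; that exactly these seven counters make up TotalFileOps is inferred from the numbers themselves, and the class name is illustrative.

// Cross-check NameNodeActivity:TotalFileOps against the per-op counters.
public class TotalFileOpsCheck {
    public static void main(String[] args) {
        long createFileOps     = 172; // NameNodeActivity:CreateFileOps
        long deleteFileOps     = 98;  // NameNodeActivity:DeleteFileOps
        long getBlockLocations = 73;  // NameNodeActivity:GetBlockLocations
        long fileInfoOps       = 325; // NameNodeActivity:FileInfoOps
        long getListingOps     = 28;  // NameNodeActivity:GetListingOps
        long filesRenamed      = 120; // NameNodeActivity:FilesRenamed
        long addBlockOps       = 172; // NameNodeActivity:AddBlockOps

        long sum = createFileOps + deleteFileOps + getBlockLocations
                 + fileInfoOps + getListingOps + filesRenamed + addBlockOps;
        System.out.println(sum + " == TotalFileOps(988): " + (sum == 988));
    }
}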
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.6 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=5 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=55.2 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1491 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=5 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=5 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=5 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSDatasetState:tag.Context=FSDatasetState 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=98469106 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39068794880 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=70 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=858 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=62 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=3.43 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=1 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=200 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=129 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:29:56,921 
INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=435.0243902439024 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=19 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=123 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=6467595.43006993 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=670 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195072265 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=1.426356589147287 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=19 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=2988 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=2988 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1 
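[editor's note] The DataNodeActivity-127.0.0.1-33099 block above pairs BytesRead=195072265 with TotalReadTime=670, which allows a rough aggregate read-throughput figure. The sketch below assumes TotalReadTime is reported in milliseconds (an assumption; the log does not state the unit) and uses an illustrative class name; the result averages over all block reads, so it says nothing about any individual transfer.

// Rough aggregate read throughput for DataNodeActivity-127.0.0.1-33099.
public class ReadThroughput {
    public static void main(String[] args) {
        long bytesRead       = 195072265L; // DataNodeActivity:BytesRead
        long totalReadTimeMs = 670L;       // DataNodeActivity:TotalReadTime (assumed ms)

        double mbPerSec = (bytesRead / 1e6) / (totalReadTimeMs / 1e3);
        System.out.printf("~%.1f MB/s aggregate read throughput%n", mbPerSec);
    }
}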
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=96506717 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=19 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=11070.755354752342 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=200 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=123 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=2.915 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=165 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=1753 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=123 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=14372.201939532231 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=195133.55220883535 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=41.68421052631579 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=3 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"600"}] 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":28,"usedSpace":43844742,"freeSpace":19399640732,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134216036},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":34,"usedSpace":53612544,"freeSpace":19533856768,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 
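[editor's note] The names in these dumps (NameNodeStatus, DataNodeInfo, DataNodeActivity-*, and so on) are JMX MBeans that the metrics-log thread is iterating over, so the same attributes can be read live over a JMX connection. A minimal sketch under stated assumptions: the process must expose a remote JMX port (9999 here is a placeholder), and the bean is assumed to be registered under the usual "Hadoop:service=<Service>,name=<Bean>" naming pattern.

import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

// Read one of the attributes dumped above directly over JMX.
public class JmxStatusProbe {
    public static void main(String[] args) throws Exception {
        // Placeholder port: requires the JVM to be started with remote JMX enabled.
        JMXServiceURL url = new JMXServiceURL(
            "service:jmx:rmi:///jndi/rmi://localhost:9999/jmxrmi");
        try (JMXConnector connector = JMXConnectorFactory.connect(url)) {
            MBeanServerConnection conn = connector.getMBeanServerConnection();
            // Assumed naming pattern for the bean behind "NameNodeStatus:*" above.
            ObjectName bean = new ObjectName("Hadoop:service=NameNode,name=NameNodeStatus");
            Object state = conn.getAttribute(bean, "State");
            System.out.println("NameNodeStatus.State = " + state); // e.g. "active"
        }
    }
}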
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:29:56,921 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,922 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.236343
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.088845335
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=84
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=165808116156
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.088845335
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":0,"usedSpace":97457286,"adminState":"In Service","nonDfsUsedSpace":41429715834,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":97457286,"remaining":38933644956,"blockScheduled":1,"blockPoolUsed":97457286,"blockPoolUsedPercent":0.11563669,"volfails":0,"lastBlockReport":10},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":0,"usedSpace":47599373,"adminState":"In Service","nonDfsUsedSpace":41479573747,"capacity":84278861824,"numBlocks":59,"version":"3.1.0","used":47599373,"remaining":38933644956,"blockScheduled":1,"blockPoolUsed":47599373,"blockPoolUsedPercent":0.056478422,"volfails":0,"lastBlockReport":10},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":2,"usedSpace":55985599,"adminState":"In Service","nonDfsUsedSpace":41470122561,"capacity":84278861824,"numBlocks":61,"version":"3.1.0","used":55985599,"remaining":38934709916,"blockScheduled":1,"blockPoolUsed":55985599,"blockPoolUsedPercent":0.066429,"volfails":0,"lastBlockReport":9},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":0,"usedSpace":98469106,"adminState":"In Service","nonDfsUsedSpace":41428704014,"capacity":84278861824,"numBlocks":68,"version":"3.1.0","used":98469106,"remaining":39067860992,"blockScheduled":0,"blockPoolUsed":98469106,"blockPoolUsedPercent":0.11683724,"volfails":0,"lastBlockReport":10}}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"1278"}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=155869860820
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=299511364
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=606
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=299511364
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}]
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=47599373
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=38934529692
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=66
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@3983418f
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:29:56,923 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=62
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=38933456540
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=97457286
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=84
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:29:56,924 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=84
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:29:56-0700","windows":[{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":24}],"totalCount":24},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"*","topUsers":[{"user":"hiveptest","count":225}],"totalCount":225},{"opType":"delete","topUsers":[{"user":"hiveptest","count":29}],"totalCount":29},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"rename","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"create","topUsers":[{"user":"hiveptest","count":49}],"totalCount":49},{"opType":"open","topUsers":[{"user":"hiveptest","count":15}],"totalCount":15}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":48}],"totalCount":48},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":10}],"totalCount":10},{"opType":"*","topUsers":[{"user":"hiveptest","count":509}],"totalCount":509},{"opType":"delete","topUsers":[{"user":"hiveptest","count":68}],"totalCount":68},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":117}],"totalCount":117},{"opType":"rename","topUsers":[{"user":"hiveptest","count":27}],"totalCount":27},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":97}],"totalCount":97},{"opType":"create","topUsers":[{"user":"hiveptest","count":100}],"totalCount":100},{"opType":"open","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":9}],"totalCount":9},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":76}],"totalCount":76},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":28}],"totalCount":28},{"opType":"*","topUsers":[{"user":"hiveptest","count":1043}],"totalCount":1043},{"opType":"delete","topUsers":[{"user":"hiveptest","count":110}],"totalCount":110},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":325}],"totalCount":325},{"opType":"rename","topUsers":[{"user":"hiveptest","count":44}],"totalCount":44},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":202}],"totalCount":202},{"opType":"create","topUsers":[{"user":"hiveptest","count":172}],"totalCount":172},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"open","topUsers":[{"user":"hiveptest","count":73}],"totalCount":73}],"windowLenMs":1500000}]}
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=299511364
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=155869860820
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=32 15
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=152
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=1021
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=1
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=1.0216998191681737
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=29.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=553
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=20
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.6136363636363636
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=202
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=1.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=32.25
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.06578947368421052
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.40923076923076923
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=325
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=532
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=172
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=1.1506849315068493
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=76
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.6052631578947368
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=44
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=9
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.7454545454545455
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.6666666666666666
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=173
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=28
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=73
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=7
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.1428571428571428
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.5375722543352601
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.4127906976744187
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=1.3430232558139534
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=3
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=172
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=110
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.7142857142857143
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.698019801980198
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=800
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=19
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.69875
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=36
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=239.08789
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=675
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=22
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1456.5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=17
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=467.62674
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=419
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1805
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=3043
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=2
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8888888888888888
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=1519
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=9
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=9
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=36
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=251.39844
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=239.09183
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=800
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=22
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1456.5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=17
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=467.62674
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=419
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:29:56,925 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1805
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=1016
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=70
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=3.525
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=200
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=143
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=405.3550724637681
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=17
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=138
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1.0837944836614173E7
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=275
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95199808
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=2.041958041958042
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=17
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1472
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1472
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=97720107
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=17
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=9456.474864130434
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=200
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=138
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=2.965
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=156
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=1777
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=138
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=26386.96510973551
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=164583.1426630435
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=23.705882352941178
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=402
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8888888888888888
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=9
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=36
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=251.46094
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=239.11829
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=117
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=22
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1456.5
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=17
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=631
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=468.5343
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=419
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1805
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@7c81d8db
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=70
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39067566080
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=98469106
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:29:56,926
INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:29:56,926 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO 
[pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: 
NodeManagerMetrics-1:ContainersLaunched=3 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] 
DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176095525,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176094507,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}] 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=61793 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=1.4615384615384615 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: 
RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=6 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=6.717948717948718 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=25965 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=39 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=39 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=97457286 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=38934365852 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=62 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=36 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:29:56,927 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=251.46094 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=239.13423 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: 
JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=792 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=22 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1456.5 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=17 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=631 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=471.3269 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=419 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1805 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=1 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0 2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125 
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=6
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=7
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=6
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=1024
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=66
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=2.5
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=200
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=131
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=334.54330708661416
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=25
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=127
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=3031354.9130859375
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=220
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=51490414
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=1.900763358778626
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=25
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=827
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=827
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=55386625
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=25
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=9245.503022974606
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=200
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=127
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=1.925
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=87
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=1124
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=127
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=16859.624555160142
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=242037.93712212818
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=12.28
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=2
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=2
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=76.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=2
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=55985599
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=38934308508
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=66
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=6
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:29:56,928 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=15.333333333333334
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog:
DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=902 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=66 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=2.08 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=200 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=129 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=370.248 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=31 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=125 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=5214523.457871397 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=12 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2500585 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=1.8992248062015504 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=31 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=97 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=97 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=46477089 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=31 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0 2018-07-21T05:29:56,929 INFO 
[pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=11813.762886597939 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=200 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=125 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=1.63 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=78 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=983 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=125 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=10843.397761953205 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=7612.7525773195875 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=3.6451612903225805 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0 2018-07-21T05:29:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: << End DataNode metrics dump 2018-07-21T05:29:59,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
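Editor's note on the block above: this is the DataNode's periodic metrics dump (note the "<< End DataNode metrics dump" marker). Each line is a Context:Metric=value pair read off a JMX bean; because the test miniclusters share one JVM, the sweep apparently also picks up NodeManager, Shuffle and NameNode retry-cache beans alongside the DataNode's own. The cadence is likely governed by dfs.datanode.metrics.logger.period.seconds (property name from memory; verify against your Hadoop version). A minimal, self-contained Java sketch of the same idea, assuming the standard JMX platform MBean server, a "Hadoop:*" ObjectName pattern, and a hypothetical 600-second period; this is illustrative, not the DataNode's actual implementation:

import java.lang.management.ManagementFactory;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class MetricsDumpSketch {
  public static void main(String[] args) {
    MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
    pool.scheduleWithFixedDelay(() -> {
      try {
        // Walk every Hadoop-registered MBean and print each attribute in the
        // same Bean:Attribute=value shape seen in the dump above.
        Set<ObjectName> names = server.queryNames(new ObjectName("Hadoop:*"), null);
        for (ObjectName name : names) {
          String bean = name.getKeyProperty("name"); // e.g. DataNodeActivity-127.0.0.1-45625
          for (MBeanAttributeInfo attr : server.getMBeanInfo(name).getAttributes()) {
            Object value = server.getAttribute(name, attr.getName());
            System.out.println(bean + ":" + attr.getName() + "=" + value);
          }
        }
      } catch (Exception e) {
        e.printStackTrace(); // keep the dump best-effort, as a metrics logger should
      }
    }, 0, 600, TimeUnit.SECONDS); // 600 s is an assumed period for illustration
  }
}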
2018-07-21T05:29:59,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:29:59,080 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,080 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:29:59,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:29:59,082 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:29:59,084 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,084 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:29:59,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:29:59,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:29:59,089 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,089 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:29:59,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
2018-07-21T05:29:59,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:29:59,092 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,092 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:29:59,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:29:59,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:29:59,095 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,095 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:29:59,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:29:59,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:29:59,106 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,106 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:29:59,106 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:29:59,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:29:59,110 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,110 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:29:59,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:29:59,112 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:29:59,114 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,114 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:29:59,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:29:59,116 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:29:59,118 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,118 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:29:59,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:29:59,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:29:59,121 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,121 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:29:59,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:29:59,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:29:59,124 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,124 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:29:59,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:29:59,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:29:59,127 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:29:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:29:59,127 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:29:59,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
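Editor's note on the round of DEBUG traffic above: DruidStorageHandler is waiting for the Druid Coordinator to report each freshly pushed segment of default.druid_partitioned_table as loaded. For every segment it issues a GET against /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}; a 204 No Content with an empty body ("response is []") means the segment is not loaded yet, and the same twelve segments are re-checked in rounds thirty seconds apart (05:29:59, 05:30:29 and 05:30:59 in this log). A minimal Java sketch of that poll-until-loaded pattern, assuming a plain HttpURLConnection client and hypothetical method and variable names (the real handler uses its own async Netty-based HTTP client and its own timeout handling):

import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;

public class SegmentPollSketch {
  // Returns true only if the Coordinator answers 200 for every segment.
  static boolean allSegmentsLoaded(String coordinator, String dataSource,
                                   List<String> segmentIds) throws Exception {
    for (String id : segmentIds) {
      URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
          + dataSource + "/segments/" + id);
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      try {
        if (conn.getResponseCode() != 200) {
          return false; // 204 No Content => segment not loaded yet
        }
      } finally {
        conn.disconnect();
      }
    }
    return true;
  }

  public static void main(String[] args) throws Exception {
    List<String> ids = List.of("segment-id-1", "segment-id-2"); // hypothetical ids
    while (!allSegmentsLoaded("http://localhost:8081",
        "default.druid_partitioned_table", ids)) {
      Thread.sleep(30_000); // matches the 30 s cadence visible in the log
    }
    System.out.println("all segments loaded");
  }
}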
2018-07-21T05:30:14,679 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:30:14,679 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.ExecutorHelper: afterExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:30:15,550 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:30:15,550 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.ExecutorHelper: afterExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:30:19,236 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:30:19,299 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
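Editor's note on the two housekeeper lines above: with DEBUG logging enabled on com.zaxxer.hikari, each HikariCP pool's housekeeper thread periodically logs its total/active/idle/waiting counts (every 30 seconds in this run; see the matching lines at 05:30:49 below). total=10 with idle=10 is consistent with a fixed-size, fully idle pool; HikariPool-1 and HikariPool-2 are presumably the metastore's two connection pools in this test JVM. A minimal sketch of a pool that would emit equivalent lines, assuming a placeholder JDBC URL and an assumed fixed size of 10:

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class PoolStatsSketch {
  public static void main(String[] args) {
    HikariConfig config = new HikariConfig();
    config.setJdbcUrl("jdbc:derby:memory:metastore;create=true"); // placeholder URL
    config.setMaximumPoolSize(10); // fixed-size pool: total=10
    config.setMinimumIdle(10);     // kept fully filled: idle=10 when unused
    try (HikariDataSource ds = new HikariDataSource(config)) {
      // While the pool sits idle, the housekeeper (at DEBUG level) reports
      // "Pool stats (total=10, active=0, idle=10, waiting=0)".
    }
  }
}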
2018-07-21T05:30:29,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:30:29,132 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,132 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:30:29,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:30:29,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:30:29,136 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,137 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:30:29,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:30:29,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:30:29,140 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,140 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:30:29,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
2018-07-21T05:30:29,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:30:29,144 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,144 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:30:29,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:30:29,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:30:29,148 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,148 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:30:29,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:30:29,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:30:29,152 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,152 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:30:29,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:30:29,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:30:29,155 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,155 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:30:29,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:30:29,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:30:29,159 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,160 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:30:29,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:30:29,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:30:29,171 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,171 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:30:29,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:30:29,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:30:29,176 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,176 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:30:29,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:30:29,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:30:29,179 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,179 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:30:29,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:30:29,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:30:29,183 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:29,183 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:30:29,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
2018-07-21T05:30:49,236 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:30:49,299 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:30:59,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:30:59,191 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,191 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:30:59,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:30:59,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:30:59,194 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,195 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:30:59,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:30:59,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting
2018-07-21T05:30:59,197 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,197 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content
2018-07-21T05:30:59,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is []
2018-07-21T05:30:59,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting
2018-07-21T05:30:59,201 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,201 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content
2018-07-21T05:30:59,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is []
2018-07-21T05:30:59,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting
2018-07-21T05:30:59,204 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,204 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content
2018-07-21T05:30:59,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is []
2018-07-21T05:30:59,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting
2018-07-21T05:30:59,207 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,207 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content
2018-07-21T05:30:59,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is []
2018-07-21T05:30:59,208 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting
2018-07-21T05:30:59,210 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,210 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content
2018-07-21T05:30:59,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is []
2018-07-21T05:30:59,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:30:59,215 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:30:59,215 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content
2018-07-21T05:30:59,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is []
2018-07-21T05:30:59,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting
2018-07-21T05:30:59,219 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived:
DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:30:59,219 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content 2018-07-21T05:30:59,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is [] 2018-07-21T05:30:59,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting 2018-07-21T05:30:59,222 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:30:59,222 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content 2018-07-21T05:30:59,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is [] 2018-07-21T05:30:59,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting 2018-07-21T05:30:59,225 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:30:59,225 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content 2018-07-21T05:30:59,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is [] 2018-07-21T05:30:59,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting 2018-07-21T05:30:59,228 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:30:59 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:30:59,228 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content 2018-07-21T05:30:59,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is [] 2018-07-21T05:31:19,236 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:31:19,300 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:31:29,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting 2018-07-21T05:31:29,233 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,233 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content 2018-07-21T05:31:29,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is [] 2018-07-21T05:31:29,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting 2018-07-21T05:31:29,236 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,236 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content 2018-07-21T05:31:29,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is [] 2018-07-21T05:31:29,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting 2018-07-21T05:31:29,241 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,242 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content 2018-07-21T05:31:29,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is [] 2018-07-21T05:31:29,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting 2018-07-21T05:31:29,246 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 
GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,246 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content 2018-07-21T05:31:29,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is [] 2018-07-21T05:31:29,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] starting 2018-07-21T05:31:29,251 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,251 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] Got response: 204 No Content 2018-07-21T05:31:29,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_5] response is [] 2018-07-21T05:31:29,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting 2018-07-21T05:31:29,256 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,256 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content 2018-07-21T05:31:29,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is [] 2018-07-21T05:31:29,262 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] starting 2018-07-21T05:31:29,264 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,265 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] Got response: 204 No Content 2018-07-21T05:31:29,265 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_4] response is [] 2018-07-21T05:31:29,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting 2018-07-21T05:31:29,268 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,268 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content 2018-07-21T05:31:29,268 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is [] 2018-07-21T05:31:29,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] starting 2018-07-21T05:31:29,277 DEBUG [HttpClient-Netty-Worker-10] 
client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,277 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] Got response: 204 No Content 2018-07-21T05:31:29,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_3] response is [] 2018-07-21T05:31:29,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] starting 2018-07-21T05:31:29,280 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,281 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] Got response: 204 No Content 2018-07-21T05:31:29,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_2] response is [] 2018-07-21T05:31:29,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] starting 2018-07-21T05:31:29,284 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,284 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] Got response: 204 No Content 2018-07-21T05:31:29,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:29:12.233-07:00_1] response is [] 2018-07-21T05:31:29,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] starting 2018-07-21T05:31:29,288 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:31:29 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:29,288 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] Got response: 204 No Content 2018-07-21T05:31:29,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_partitioned_table/segments/default.druid_partitioned_table_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:29:12.233-07:00] response is [] 2018-07-21T05:31:49,237 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:31:49,300 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:31:59,290 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Wait time exhausted and we have [12] out of [12] segments not loaded yet 2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc 2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: 
2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY
2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc
2018-07-21T05:31:59,308 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@druid_partitioned_table
2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,312 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7
2018-07-21T05:31:59,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,312 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:31:59,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=1, alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=26, getTable_(String, String, )=10}
2018-07-21T05:31:59,312 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721052912_bbcf1c8f-986c-497b-a154-1a4e5b9c8348); Time taken: 167.055 seconds
2018-07-21T05:31:59,313 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:31:59,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,313 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:31:59,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query INSERT OVERWRITE TABLE druid_partitioned_table SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:31:59,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table/.hive-staging_hive_2018-07-21_05-29-12_099_8653997294804690914-1
2018-07-21T05:31:59,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-29-12_099_8653997294804690914-1
2018-07-21T05:31:59,316 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 167.215 seconds
2018-07-21T05:31:59,316 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:31:59,316 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:31:59,316 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:31:59,316 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:31:59,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:31:59,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:31:59,318 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721053159_8edeca6f-9bb9-4202-b60c-badbfb9c9035): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:31:59,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table
2018-07-21T05:31:59,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:31:59,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,320 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:31:59,320 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:31:59,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,321 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:31:59,321 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:31:59,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,321 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,321 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,331 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:31:59,331 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:31:59,339 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1
2018-07-21T05:31:59,339 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:31:59,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:31:59,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,342 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,342 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,344 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,344 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,345 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table
2018-07-21T05:31:59,345 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_partitioned_table
2018-07-21T05:31:59,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:31:59,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:31:59,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:31:59,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:31:59,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:31:59,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:31:59,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,371 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:31:59,372 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:31:59,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:31:59,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,373 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:31:59,373 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:31:59,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:31:59,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:31:59,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after top-level introduceDerivedTable
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:31:59,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:31:59,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:31:59,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:31:59,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(_c0=[$0], _c1=[$1])
  DruidQuery(table=[[default, druid_partitioned_table]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
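The PlanModifier dumps above show the effect of Calcite's aggregate pushdown: the HiveAggregate/HiveProject pair from the earlier sql2rel plan has been folded into the DruidQuery itself (groups=[{}], aggs=[[sum($7), sum($8)]], where $7 and $8 are the cint and cbigint columns), leaving Hive only a thin HiveProject that renames the two outputs to _c0/_c1. A group-by-nothing aggregation of this shape corresponds, roughly, to a Druid timeseries query with granularity "all". The payload below is an illustrative reconstruction under that assumption, not something printed in this run; the interval and the $f0/$f1 output names are taken from the plan dumps, the aggregator types are assumed:

    {
      "queryType": "timeseries",
      "dataSource": "default.druid_partitioned_table",
      "granularity": "all",
      "intervals": ["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],
      "aggregations": [
        { "type": "longSum", "name": "$f0", "fieldName": "cint" },
        { "type": "longSum", "name": "$f1", "fieldName": "cbigint" }
      ]
    }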
2018-07-21T05:31:59,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:31:59,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:31:59,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,409 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:31:59,409 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:31:59,410 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1
2018-07-21T05:31:59,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:31:59,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for druid_partitioned_table TS[0]
2018-07-21T05:31:59,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:31:59,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (. (tok_table_or_col druid_partitioned_table) $f0) _c0) (tok_selexpr (. (tok_table_or_col druid_partitioned_table) $f1) _c1))
2018-07-21T05:31:59,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = druid_partitioned_table{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:31:59,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:31:59,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:31:59,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001/.hive-staging_hive_2018-07-21_05-31-59_319_4913492007188853867-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001
2018-07-21T05:31:59,412 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001/.hive-staging_hive_2018-07-21_05-31-59_319_4913492007188853867-1
2018-07-21T05:31:59,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001/.hive-staging_hive_2018-07-21_05-31-59_319_4913492007188853867-1/-ext-10003
2018-07-21T05:31:59,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001 row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)}
2018-07-21T05:31:59,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:31:59,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:31:59,414 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:31:59,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-SEL[1]-FS[2] [three empty log.PerfLogger records at 05:31:59,414 elided; their angle-bracketed PERFLOG begin/end payloads did not survive this capture] 2018-07-21T05:31:59,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1 [six empty log.PerfLogger records at 05:31:59,415 elided] 2018-07-21T05:31:59,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(2) 2018-07-21T05:31:59,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(1) 2018-07-21T05:31:59,415 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0) 2018-07-21T05:31:59,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-SEL[1]-FS[2] [long run of empty log.PerfLogger records at 05:31:59,415-416 elided]
2018-07-21T05:31:59,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-SEL[1]-LIST_SINK[3] 2018-07-21T05:31:59,416 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation 2018-07-21T05:31:59,416 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false) 2018-07-21T05:31:59,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start 2018-07-21T05:31:59,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition [one empty log.PerfLogger record elided] 2018-07-21T05:31:59,416 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:$f0, type:bigint, comment:null), FieldSchema(name:$f1, type:bigint, comment:null)], properties:null) 2018-07-21T05:31:59,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint] 2018-07-21T05:31:59,417 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing operator TS[0] 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Operator 0 TS initialized 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing children of 0 TS 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing child 1 SEL 2018-07-21T05:31:59,418 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing operator SEL[1] 2018-07-21T05:31:59,418 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: SELECT struct<$f0:bigint,$f1:bigint> 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL 2018-07-21T05:31:59,418
DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Operator 1 SEL initialized 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing children of 1 SEL 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing child 3 LIST_SINK 2018-07-21T05:31:59,418 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing operator LIST_SINK[3] 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Operator 3 LIST_SINK initialized 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK done is reset. 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL done is reset. 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS done is reset. 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,418 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:31:59,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=3, getTable_(String, String, )=22, flushCache_()=0, getAllDatabases_()=2, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getForeignKeys_(ForeignKeysRequest, )=1} 2018-07-21T05:31:59,418 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721053159_8edeca6f-9bb9-4202-b60c-badbfb9c9035); Time taken: 0.1 seconds 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:31:59,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721053159_8edeca6f-9bb9-4202-b60c-badbfb9c9035): SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:31:59,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@druid_partitioned_table 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001 2018-07-21T05:31:59,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 
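Worth noting before the records that follow: execution completes in 0.0 seconds and no Tez/MR task ever appears, because the whole aggregation was pushed down to Druid as a single timeseries query (see druid.query.type=timeseries and druid.query.json in the FetchOperator table-properties dump below). A hypothetical way to inspect that rewrite from a Hive session, assuming the same table exists:

    EXPLAIN SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table;
    -- the TableScan in the resulting plan should surface the generated
    -- Druid query properties (druid.query.type, druid.query.json)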
[eight empty log.PerfLogger records at 05:31:59,419 elided; their angle-bracketed PERFLOG payloads did not survive this capture] 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@druid_partitioned_table 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001 [two empty log.PerfLogger records elided] 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0 [four empty log.PerfLogger records elided] 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:31:59,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721053159_8edeca6f-9bb9-4202-b60c-badbfb9c9035); Time taken: 0.0 seconds 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK [one empty log.PerfLogger record elided] 2018-07-21T05:31:59,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:31:59,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table 2018-07-21T05:31:59,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<$f0:bigint,$f1:bigint> 2018-07-21T05:31:59,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties: table properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean,
location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table, last_modified_by=hive_test_user, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532176152, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, last_modified_time=1532176152, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=6, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table} partition properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table, last_modified_by=hive_test_user, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_partitioned_table, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_partitioned_table { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532176152, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, last_modified_time=1532176152, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_partitioned_table","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, druid.segment.targetShardsPerGranularity=6, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_partitioned_table} 2018-07-21T05:31:59,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
serde.DruidQueryRecordReader: Retrieving data from druid using query: TimeseriesQuery{dataSource='default.druid_partitioned_table', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}} 2018-07-21T05:31:59,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] starting 2018-07-21T05:31:59,424 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082 2018-07-21T05:31:59,429 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 2018 12:31:59 GMT Content-Type: application/x-jackson-smile X-Druid-Query-Id: 1611ccd3-74cf-47a1-9de6-fc97bd982769 X-Druid-Response-Context: {} Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:31:59,429 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got response: 200 OK 2018-07-21T05:31:59,429 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@4a9919 2018-07-21T05:31:59,429 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 6B, last=false 2018-07-21T05:31:59,429 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf 2018-07-21T05:31:59,429 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] Got chunk: 0B, last=true 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: close called for operator TS[0] 2018-07-21T05:31:59,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing operator TS[0] 2018-07-21T05:31:59,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_TS_0:0, 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing child = SEL[1] 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: close called for operator SEL[1] 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: allInitializedParentsAreClosed? 
parent.state = CLOSE 2018-07-21T05:31:59,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing operator SEL[1] 2018-07-21T05:31:59,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_SEL_1:0, 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing child = LIST_SINK[3] 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: close called for operator LIST_SINK[3] 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: allInitializedParentsAreClosed? parent.state = CLOSE 2018-07-21T05:31:59,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Closing operator LIST_SINK[3] 2018-07-21T05:31:59,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_LIST_SINK_3:0, 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: 3 Close done 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 1 Close done 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: 0 Close done 2018-07-21T05:31:59,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001 2018-07-21T05:31:59,441 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1/-mr-10001/.hive-staging_hive_2018-07-21_05-31-59_319_4913492007188853867-1 2018-07-21T05:31:59,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_319_4913492007188853867-1 2018-07-21T05:31:59,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.101 seconds 2018-07-21T05:31:59,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:31:59,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:31:59,443 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:31:59,443 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:31:59,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: 10 2018-07-21T05:31:59,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:31:59,443 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:31:59,443 INFO [main] conf.HiveConf: Using the default 
value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:31:59,443 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:31:59,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 2018-07-21T05:31:59,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:31:59,445 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3): CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:31:59,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:31:59,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:31:59,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:31:59,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:31:59,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_max_size_partition position=22 2018-07-21T05:31:59,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition 2018-07-21T05:31:59,451 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition 2018-07-21T05:31:59,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,453 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:31:59,453 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:31:59,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:31:59,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:31:59,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:31:59,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,462 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,463 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:31:59,463 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:31:59,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,463 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:31:59,463 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:31:59,465 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: 
getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:31:59,465 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,465 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,465 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse 2018-07-21T05:31:59,465 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1 2018-07-21T05:31:59,467 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:31:59,469 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,470 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,471 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,471 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,472 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,472 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr 
cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,473 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,473 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,474 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:31:59,474 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:31:59,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:31:59,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) 
HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) [long run of empty log.PerfLogger records at 05:31:59,479-502 elided; their angle-bracketed PERFLOG payloads did not survive this capture] 2018-07-21T05:31:59,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) [four empty log.PerfLogger records at 05:31:59,503 elided]
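The Calcite plan dumps above all derive from the CTAS statement parsed a few records earlier in this trace (logged there on a single line). Reformatted here for readability, content unchanged:

    CREATE EXTERNAL TABLE druid_max_size_partition
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES (
      "druid.segment.granularity" = "HOUR",
      "druid.query.granularity" = "MINUTE"
    )
    AS SELECT
      cast(`ctimestamp1` as timestamp with local time zone) as `__time`,
      cstring1, cstring2, cdouble, cfloat, ctinyint,
      csmallint, cint, cbigint, cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;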
[nine empty log.PerfLogger records at 05:31:59,507-510 elided; their angle-bracketed PERFLOG payloads did not survive this capture] 2018-07-21T05:31:59,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:31:59,529 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_max_size_partition position=22 [one empty metrics.PerfLogger record elided] 2018-07-21T05:31:59,529 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
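A reading aid for the $N references in these plan dumps: Calcite numbers the source columns 0-based in alltypesorc's declared order (ctinyint, csmallint, cint, cbigint, cfloat, cdouble, cstring1, cstring2, ctimestamp1, ctimestamp2, cboolean1, cboolean2), which is also the order in the row-schema records near the end of this trace. So $8 is ctimestamp1, and HiveFilter(condition=[IS NOT NULL($8)]) is exactly the CTAS's WHERE clause; expressed on its own in HiveQL:

    -- $8 resolves to ctimestamp1 (the ninth column, 0-based index 8)
    SELECT * FROM alltypesorc WHERE ctimestamp1 IS NOT NULL;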
2018-07-21T05:31:59,529 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition 2018-07-21T05:31:59,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,531 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:31:59,531 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:31:59,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:31:59,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,532 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:31:59,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,532 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,532 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:31:59,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,542 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:31:59,542 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:31:59,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,542 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:31:59,542 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:31:59,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default 
locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:31:59,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0] 2018-07-21T05:31:59,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:31:59,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:31:59,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. 
(tok_table_or_col alltypesorc) cboolean2) cboolean2))
2018-07-21T05:31:59,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:31:59,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:31:59,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:31:59,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:31:59,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:31:59,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003
2018-07-21T05:31:59,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,547 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:31:59,547 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:31:59,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:31:59,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:31:59,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:31:59,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:31:59,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:31:59,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:31:59,548 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:31:59,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:31:59,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:31:59,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:31:59,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:31:59,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
2018-07-21T05:31:59,551 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity]]
2018-07-21T05:31:59,552 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:31:59,552 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint:
smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null}) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0])))) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time 
zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null}) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null}) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7] 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity]) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null}) 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504 2018-07-21T05:31:59,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,554 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:31:59,554 
INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc 2018-07-21T05:31:59,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.372831ms + 0.011074ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)] 2018-07-21T05:31:59,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0] 2018-07-21T05:31:59,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 
Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:31:59,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:31:59,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4] 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2] 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 
0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5] 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] 
isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false} 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6] 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7] 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] 
isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 
colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0]
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4]
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2]
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5]
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6]
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7]
2018-07-21T05:31:59,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3]
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:31:59,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:31:59,574 DEBUG
[ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null}) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0])))) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null}) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {} 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6] 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null}) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {} 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7] 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity]) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null}) 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {} 2018-07-21T05:31:59,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3] 2018-07-21T05:31:59,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:31:59,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest 
ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:31:59,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:31:59,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:31:59,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0] 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6] 2018-07-21T05:31:59,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0] 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. 
Leaf operator: RS[6] 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7] 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3] 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7] 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7] 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3] 2018-07-21T05:31:59,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_max_size_partition 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events. 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans 2018-07-21T05:31:59,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled. 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled. 
2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false) 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null) 2018-07-21T05:31:59,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=1, isCompatibleWith_(Configuration, )=0, getDatabase_(String, )=8, getTable_(String, String, )=18, flushCache_()=0, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=10, getForeignKeys_(ForeignKeysRequest, )=0} 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3); Time taken: 0.134 seconds 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3): CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, 
ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_max_size_partition 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1 2018-07-21T05:31:59,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1 2018-07-21T05:31:59,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:MAPRED] in serial mode 2018-07-21T05:31:59,593 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found. id: hive_test_user: no such user id: hive_test_user: no such user at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?] at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?] 
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?]
    at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:31:59,617 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_445_1971394732788950030-1
2018-07-21T05:31:59,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_445_1971394732788950030-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:31:59,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:31:59,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:31:59,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3
2018-07-21T05:31:59,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: CREATE EXTERNAL TABLE druid_max_size_...NULL (Stage-1)
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\nCREATE EXTERNAL TABLE druid_max_size_partition\n STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'\n TBLPROPERTIES (\n \"druid.segment.granularity\" = \"HOUR\",\n \"druid.query.granularity\" = \"MINUTE\"\n )\n AS\n SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL"}
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:31:59,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:31:59,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,622 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 2.98KB
2018-07-21T05:31:59,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003
2018-07-21T05:31:59,630 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003
2018-07-21T05:31:59,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10002
2018-07-21T05:31:59,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10001
2018-07-21T05:31:59,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,636 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_445_1971394732788950030-1
2018-07-21T05:31:59,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:31:59,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:31:59,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.19KB
2018-07-21T05:31:59,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:31:59,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,659 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,659 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,659 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=CREATE EXTERNAL TABLE druid_max_size_...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3 }
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
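A note on the ShellBasedUnixGroupsMapping warning logged before the task launch above: it is benign in this environment. hive_test_user exists only in Hive's test configuration, not as an OS account, so the shell-based fallback (which runs the id command) finds nothing, and TezTask proceeds with an empty group list for the DAG ACLs. A minimal sketch of probing the same resolution path, assuming a stock hadoop-common on the classpath; the static-override property is the documented core-default.xml key for bypassing the shell for synthetic users:

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Groups;

// Sketch: resolve user -> groups the same way TezTask does, pre-seeding a
// static mapping so synthetic test users never reach ShellBasedUnixGroupsMapping.
public class GroupProbe {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Format is "user1=group1,group2;user2=;..."; users listed here are
    // answered from this map instead of the 'id' shell command.
    conf.set("hadoop.user.group.static.mapping.overrides",
        "hive_test_user=hive_test_group;");

    Groups groups = Groups.getUserToGroupsMappingService(conf);
    List<String> resolved = groups.getGroups("hive_test_user");
    System.out.println("groups(hive_test_user) = " + resolved);
  }
}

Without the override, a nonexistent OS user produces exactly the PartialGroupNameException WARN seen above, and execution continues.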
2018-07-21T05:31:59,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:31:59,717 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741997_1173, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_7.recovery
2018-07-21T05:31:59,733 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_7.recovery for DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:31:59,740 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagId=dag_1532175606211_0001_7, dagName=CREATE EXTERNAL TABLE druid_max_size_...NULL (Stage-1)
2018-07-21T05:31:59,740 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,741 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:31:59,741 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:32:00,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:32:00,275 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0001)
2018-07-21T05:32:00,277 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:32:01,405 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:32:01,423 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0001
2018-07-21T05:32:01,424 DEBUG [ContainersLauncher #2] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #2, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:32:03,297 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:32:05,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:32:05,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:32:08,824 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:32:09,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:32:09,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:32:09,329 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:10,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:10,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:10,589 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,589 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,589 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:10,589 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:10,589 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:10,589 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741998_1174, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/776a1a7280554f74b82cb6eb3ed73654/0_descriptor.json
2018-07-21T05:32:10,645 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/776a1a7280554f74b82cb6eb3ed73654/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
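Each repeating quartet here (Failed to find datanode / No node to choose / Choosing random from N ... / chooseRandom returning ...) is the namenode picking one pipeline target at a time for a new block, re-running the random choice with the already-picked datanodes excluded; on this single-rack mini cluster the scoped lookup always falls through to /default-rack. A simplified, self-contained model of that exclusion loop (not the real BlockPlacementPolicy, just the selection pattern these DEBUG lines trace):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

// Simplified model of chooseRandom-with-exclusions: pick `replication`
// distinct targets, excluding every node already chosen for this block.
public class ChooseRandomSketch {
  private static final Random RND = new Random();

  static String chooseRandom(List<String> rackNodes, Set<String> excluded) {
    List<String> candidates = new ArrayList<>();
    for (String node : rackNodes) {
      if (excluded.contains(node)) {
        continue;                     // "Node X is excluded, continuing."
      }
      candidates.add(node);
    }
    if (candidates.isEmpty()) {
      return null;                    // "No node to choose."
    }
    return candidates.get(RND.nextInt(candidates.size()));
  }

  public static void main(String[] args) {
    List<String> rack = Arrays.asList(
        "127.0.0.1:52570", "127.0.0.1:33099", "127.0.0.1:45625", "127.0.0.1:40780");
    Set<String> excluded = new HashSet<>();
    for (int replica = 0; replica < 3; replica++) {  // replication factor 3
      String target = chooseRandom(rack, excluded);
      System.out.println("chooseRandom returning " + target);
      excluded.add(target);           // the next replica must land elsewhere
    }
  }
}

The real implementation retries random picks rather than pre-filtering, which is why the log shows "Node X is excluded, continuing." a variable number of times per choice; the outcome is the same.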
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:10,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:10,657 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073741999_1175, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/0_index.zip
2018-07-21T05:32:10,669 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/776a1a7280554f74b82cb6eb3ed73654/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:10,692 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,692 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,692 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:10,693 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:10,693 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,693 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,693 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:10,693 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:10,693 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742000_1176, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700.json
2018-07-21T05:32:10,705 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:10,782 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,782 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:10,783 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:10,783 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742001_1177, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0ef7ff1134ba49f48ff069d464f5e05c/1_descriptor.json
2018-07-21T05:32:11,197 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0ef7ff1134ba49f48ff069d464f5e05c/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,202 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:11,203 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742002_1178, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/1_index.zip
2018-07-21T05:32:11,211 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0ef7ff1134ba49f48ff069d464f5e05c/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,219 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,220 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,220 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742003_1179, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_1.json
2018-07-21T05:32:11,238 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,305 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
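The file names being closed here show the reducer publishing each Druid segment in two parts under druidStagingDir: the payload (N_index.zip) and its metadata (N_descriptor.json) in intermediateSegmentDir, plus a per-partition copy of the descriptor in segmentsDescriptorDir named datasource_intervalStart_intervalEnd_version[_partition].json; the hour-wide interval (1969-12-31T23:00Z to 1970-01-01T00:00Z) follows from "druid.segment.granularity" = "HOUR". A hypothetical helper reproducing just that naming convention; descriptorFileName is illustrative, the real names are produced inside hive-druid-handler:

// Hypothetical helper mirroring the segmentsDescriptorDir file-name pattern
// visible in these entries; not the hive-druid-handler implementation.
public class SegmentNameSketch {
  static String descriptorFileName(String dataSource, String intervalStart,
      String intervalEnd, String version, int partition) {
    // The timestamps are written without ':' (e.g. 1969-12-31T230000.000Z),
    // since ':' is not usable inside an HDFS path component.
    String base = dataSource + "_" + intervalStart + "_" + intervalEnd + "_" + version;
    return (partition == 0 ? base : base + "_" + partition) + ".json";
  }

  public static void main(String[] args) {
    // Reproduces the partition-1 descriptor name seen in the log above.
    System.out.println(descriptorFileName(
        "default.druid_max_size_partition",
        "1969-12-31T230000.000Z", "1970-01-01T000000.000Z",
        "2018-07-21T053159.547-0700", 1));
  }
}

Partition 0 carries no suffix, which matches the first segmentsDescriptorDir file closed above; partitions 1 and onward append _1, _2, and so on.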
2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:11,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,306 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742004_1180, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/044e86bffc61418094c624909d5e747c/2_descriptor.json 2018-07-21T05:32:11,313 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/044e86bffc61418094c624909d5e747c/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:11,317 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:11,317 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742005_1181, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/2_index.zip 2018-07-21T05:32:11,324 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/044e86bffc61418094c624909d5e747c/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:11,333 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:11,334 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742006_1182, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_2.json 2018-07-21T05:32:11,341 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:11,401 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,401 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742007_1183, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f9c8becd96f4693b72391a3b3618840/3_descriptor.json 2018-07-21T05:32:11,408 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f9c8becd96f4693b72391a3b3618840/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:11,413 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:11,413 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742008_1184, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/3_index.zip 2018-07-21T05:32:11,420 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f9c8becd96f4693b72391a3b3618840/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:11,429 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,429 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742009_1185, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_3.json 2018-07-21T05:32:11,436 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:11,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,482 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742010_1186, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f7d1cd1b019b42b08de1fd202f5509a0/4_descriptor.json 2018-07-21T05:32:11,493 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f7d1cd1b019b42b08de1fd202f5509a0/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:11,497 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,497 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742011_1187, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/4_index.zip 2018-07-21T05:32:11,504 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f7d1cd1b019b42b08de1fd202f5509a0/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:11,511 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,511 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742012_1188, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_4.json 2018-07-21T05:32:11,519 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:11,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:11,577 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742013_1189, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/457bcb02f40445bd8470c34bc9064b91/5_descriptor.json 2018-07-21T05:32:11,585 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/457bcb02f40445bd8470c34bc9064b91/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:11,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:11,589 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742014_1190, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/5_index.zip 2018-07-21T05:32:11,599 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/457bcb02f40445bd8470c34bc9064b91/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:11,613 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:11,613 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:11,613 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,614 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742015_1191, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_5.json
2018-07-21T05:32:11,621 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,663 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,664 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742016_1192, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1083d3e532eb4adbb4d2926ced06e528/6_descriptor.json
2018-07-21T05:32:11,672 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1083d3e532eb4adbb4d2926ced06e528/6_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,675 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,675 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,676 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,676 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,676 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,676 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,676 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,676 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,676 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742017_1193, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/6_index.zip
2018-07-21T05:32:11,683 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1083d3e532eb4adbb4d2926ced06e528/6_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,694 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,694 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742018_1194, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_6.json
2018-07-21T05:32:11,701 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_6.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,745 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,745 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742019_1195, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/515767ad30a5455aa9aba4ee37a04b49/7_descriptor.json
2018-07-21T05:32:11,752 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/515767ad30a5455aa9aba4ee37a04b49/7_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,759 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742020_1196, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/7_index.zip
2018-07-21T05:32:11,766 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/515767ad30a5455aa9aba4ee37a04b49/7_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:11,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,774 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742021_1197, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_7.json
2018-07-21T05:32:11,780 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_7.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,823 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:11,824 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,824 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742022_1198, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a9c6340f35a24ae89c2b93a9efac9dba/8_descriptor.json
2018-07-21T05:32:11,829 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a9c6340f35a24ae89c2b93a9efac9dba/8_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:11,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,833 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742023_1199, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/8_index.zip
2018-07-21T05:32:11,839 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a9c6340f35a24ae89c2b93a9efac9dba/8_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:11,845 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:11,846 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:11,846 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742024_1200, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_8.json
2018-07-21T05:32:11,852 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_8.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:11,898 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:11,898 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742025_1201, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/92e641bf8a2a44068ad6ea92a4ae1907/9_descriptor.json
2018-07-21T05:32:11,904 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/92e641bf8a2a44068ad6ea92a4ae1907/9_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:11,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:11,907 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742026_1202, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/9_index.zip
2018-07-21T05:32:11,914 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/92e641bf8a2a44068ad6ea92a4ae1907/9_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:11,920 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:11,921 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742027_1203, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_9.json
2018-07-21T05:32:12,331 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_9.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,343 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,370 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,371 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,371 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,371 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742028_1204, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2a21e2a853643dbbb1b70c3566c8a12/10_descriptor.json
2018-07-21T05:32:12,379 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2a21e2a853643dbbb1b70c3566c8a12/10_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,383 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,383 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742029_1205, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/10_index.zip
2018-07-21T05:32:12,389 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2a21e2a853643dbbb1b70c3566c8a12/10_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,395 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,395 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,396 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,396 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,396 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,396 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,396 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:12,396 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,396 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742030_1206, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_10.json
2018-07-21T05:32:12,412 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_10.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:12,470 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,470 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742031_1207, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b7fa57ff3384ec4847585f2bf2fd396/11_descriptor.json
2018-07-21T05:32:12,478 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b7fa57ff3384ec4847585f2bf2fd396/11_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,483 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,483 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742032_1208, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/11_index.zip
2018-07-21T05:32:12,491 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b7fa57ff3384ec4847585f2bf2fd396/11_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:12,499 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,500 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742033_1209, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_11.json
2018-07-21T05:32:12,507 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_11.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,547 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,547 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,547 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,547 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,547 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,548 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,548 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,548 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,548 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742034_1210, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8142fcb74f7a40499e0f9bb590ce0128/12_descriptor.json
2018-07-21T05:32:12,556 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8142fcb74f7a40499e0f9bb590ce0128/12_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,560 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,560 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742035_1211, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/12_index.zip
2018-07-21T05:32:12,566 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8142fcb74f7a40499e0f9bb590ce0128/12_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,572 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,572 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742036_1212, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_12.json
2018-07-21T05:32:12,579 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_12.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:12,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,616 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742037_1213, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de923acb795b447398e67f27ddcee68f/13_descriptor.json
2018-07-21T05:32:12,624 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de923acb795b447398e67f27ddcee68f/13_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,628 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,628 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,628 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,628 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,628 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:12,628 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,628 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742038_1214, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/13_index.zip
2018-07-21T05:32:12,635 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de923acb795b447398e67f27ddcee68f/13_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,641 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,641 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742039_1215, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_13.json
2018-07-21T05:32:12,647 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_13.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:12,682 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,683 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742040_1216, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/35d1000c304c49f0af7d5e1823feff52/14_descriptor.json
2018-07-21T05:32:12,692 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/35d1000c304c49f0af7d5e1823feff52/14_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:12,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,695 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742041_1217, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/14_index.zip
2018-07-21T05:32:12,701 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/35d1000c304c49f0af7d5e1823feff52/14_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:12,706 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,706 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742042_1218, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_14.json
2018-07-21T05:32:12,711 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_14.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,748 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742043_1219, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/631102de49bf46d8b4eedce4e0e51940/15_descriptor.json
2018-07-21T05:32:12,753 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/631102de49bf46d8b4eedce4e0e51940/15_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,756 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,756 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742044_1220, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/15_index.zip
2018-07-21T05:32:12,761 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/631102de49bf46d8b4eedce4e0e51940/15_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,767 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,768 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,768 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742045_1221, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_15.json
2018-07-21T05:32:12,774 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_15.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,803 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,803 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,803 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:12,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,804 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742046_1222, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6404c5c2e964d89b9dbe0317ed6545e/16_descriptor.json
2018-07-21T05:32:12,809 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6404c5c2e964d89b9dbe0317ed6545e/16_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:12,812 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,812 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742047_1223, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/16_index.zip
2018-07-21T05:32:12,817 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6404c5c2e964d89b9dbe0317ed6545e/16_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,822 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,822 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742048_1224, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_16.json
2018-07-21T05:32:12,827 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_16.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:12,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,860 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,860 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,860 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:12,860 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,860 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742049_1225, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6bd3708e0e054c4996f7f1f0fe996dfb/17_descriptor.json
2018-07-21T05:32:12,865 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6bd3708e0e054c4996f7f1f0fe996dfb/17_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,868 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,868 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742050_1226, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/17_index.zip
2018-07-21T05:32:12,873 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6bd3708e0e054c4996f7f1f0fe996dfb/17_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,879 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,879 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742051_1227, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_17.json
2018-07-21T05:32:12,885 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_17.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:12,917 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,918 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742052_1228, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f288ec15e6a34d7fbecda15562f903b8/18_descriptor.json
2018-07-21T05:32:12,923 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f288ec15e6a34d7fbecda15562f903b8/18_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,926 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,926 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,927 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,927 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,927 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,927 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,927 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:12,927 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,927 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742053_1229, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/18_index.zip
2018-07-21T05:32:12,932 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f288ec15e6a34d7fbecda15562f903b8/18_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,936 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,936 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:12,937 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,937 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742054_1230, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_18.json
2018-07-21T05:32:12,942 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_18.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,971 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742055_1231, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6fdfb0593964b19b753fa219b0ccd5b/19_descriptor.json
2018-07-21T05:32:12,976 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6fdfb0593964b19b753fa219b0ccd5b/19_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:12,980 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:12,980 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742056_1232, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/19_index.zip
2018-07-21T05:32:12,986 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6fdfb0593964b19b753fa219b0ccd5b/19_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:12,991 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:12,992 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742057_1233, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_19.json
2018-07-21T05:32:12,997 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_19.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:13,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:13,029 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742058_1234, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/02c59d938b8b4a7ca00f5393e72ff91a/20_descriptor.json
2018-07-21T05:32:13,035 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/02c59d938b8b4a7ca00f5393e72ff91a/20_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:13,038 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:13,038 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742059_1235, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/20_index.zip
2018-07-21T05:32:13,044 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/02c59d938b8b4a7ca00f5393e72ff91a/20_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:13,049 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:13,049 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742060_1236, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_20.json
2018-07-21T05:32:13,055 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_20.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:13,087 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:13,088 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742061_1237, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b82415b06fb43c4b8e751f88203c8e0/21_descriptor.json
2018-07-21T05:32:13,495 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b82415b06fb43c4b8e751f88203c8e0/21_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,498 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,498 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:13,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:13,499 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742062_1238, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/21_index.zip
2018-07-21T05:32:13,504 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b82415b06fb43c4b8e751f88203c8e0/21_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:13,510 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:13,510 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742063_1239, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_21.json
2018-07-21T05:32:13,520 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_21.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:13,549 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,549 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742064_1240, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1df8804bf856488cb05da91300ab1500/22_descriptor.json 2018-07-21T05:32:13,559 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1df8804bf856488cb05da91300ab1500/22_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,568 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,568 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742065_1241, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/22_index.zip 2018-07-21T05:32:13,575 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1df8804bf856488cb05da91300ab1500/22_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:13,581 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,581 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742066_1242, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_22.json 2018-07-21T05:32:13,588 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_22.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:13,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,620 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,620 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,620 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:13,620 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,620 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742067_1243, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a0296b19eef64c45993fe5ac4ab4d266/23_descriptor.json 2018-07-21T05:32:13,627 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a0296b19eef64c45993fe5ac4ab4d266/23_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,631 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,631 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742068_1244, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/23_index.zip 2018-07-21T05:32:13,638 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a0296b19eef64c45993fe5ac4ab4d266/23_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:13,644 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,644 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742069_1245, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_23.json 2018-07-21T05:32:13,650 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_23.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,679 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,679 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,679 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,679 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,679 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,679 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,680 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:13,680 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,680 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742070_1246, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/59f95490a0e64d2395bd2bbe04529e94/24_descriptor.json 2018-07-21T05:32:13,685 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/59f95490a0e64d2395bd2bbe04529e94/24_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,688 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,688 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742071_1247, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/24_index.zip 2018-07-21T05:32:13,697 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/59f95490a0e64d2395bd2bbe04529e94/24_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:13,703 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,703 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742072_1248, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_24.json 2018-07-21T05:32:13,709 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_24.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,741 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742073_1249, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/67652d91d60e4c178de556e30fa20048/25_descriptor.json 2018-07-21T05:32:13,752 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/67652d91d60e4c178de556e30fa20048/25_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,756 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,756 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,756 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,757 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:13,757 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,757 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,757 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,757 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:13,757 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,757 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742074_1250, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/25_index.zip 2018-07-21T05:32:13,768 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/67652d91d60e4c178de556e30fa20048/25_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:13,774 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,774 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742075_1251, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_25.json 2018-07-21T05:32:13,780 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_25.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:13,811 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,811 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742076_1252, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae1821822add41fd98b4d9876506d46c/26_descriptor.json 2018-07-21T05:32:13,820 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae1821822add41fd98b4d9876506d46c/26_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,825 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742077_1253, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/26_index.zip 2018-07-21T05:32:13,831 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae1821822add41fd98b4d9876506d46c/26_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:13,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,837 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742078_1254, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_26.json 2018-07-21T05:32:13,843 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_26.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:13,878 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,878 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742079_1255, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f70e9c2ed018439f94441f0156f320b5/27_descriptor.json 2018-07-21T05:32:13,887 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f70e9c2ed018439f94441f0156f320b5/27_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:13,890 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,891 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742080_1256, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/27_index.zip 2018-07-21T05:32:13,899 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f70e9c2ed018439f94441f0156f320b5/27_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,904 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:13,905 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,905 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742081_1257, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_27.json 2018-07-21T05:32:13,910 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_27.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:13,938 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,938 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742082_1258, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/16859759cdde49fa8c38991887af215a/28_descriptor.json 2018-07-21T05:32:13,946 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/16859759cdde49fa8c38991887af215a/28_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:13,952 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:13,952 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742083_1259, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/28_index.zip 2018-07-21T05:32:13,958 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/16859759cdde49fa8c38991887af215a/28_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:13,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:13,963 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742084_1260, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_28.json 2018-07-21T05:32:13,969 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_28.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:13,999 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:13,999 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:14,000 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:14,000 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:14,000 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,000 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:14,000 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:14,000 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:14,000 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742085_1261, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/79b1d79be43648e0b89b7b8c14cae887/29_descriptor.json 2018-07-21T05:32:14,006 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/79b1d79be43648e0b89b7b8c14cae887/29_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:14,009 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:14,009 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742086_1262, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/29_index.zip 2018-07-21T05:32:14,416 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/79b1d79be43648e0b89b7b8c14cae887/29_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:14,421 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:14,421 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742087_1263, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_29.json 2018-07-21T05:32:14,430 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_29.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:14,459 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:14,459 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742088_1264, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/10713e87c65a474c8a7c788f6692fd5c/30_descriptor.json
2018-07-21T05:32:14,465 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/10713e87c65a474c8a7c788f6692fd5c/30_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:14,468 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:14,468 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742089_1265, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/30_index.zip
2018-07-21T05:32:14,476 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/10713e87c65a474c8a7c788f6692fd5c/30_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:14,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:14,482 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742090_1266, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_30.json
2018-07-21T05:32:14,490 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_30.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:14,529 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:14,530 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742091_1267, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/68df868e7fd940b1bf21553abd3d54f1/31_descriptor.json
2018-07-21T05:32:14,536 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/68df868e7fd940b1bf21553abd3d54f1/31_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:14,540 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,540 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,540 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:14,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:14,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:14,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:14,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:14,541 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742092_1268, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/31_index.zip
2018-07-21T05:32:14,546 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/68df868e7fd940b1bf21553abd3d54f1/31_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:14,555 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,555 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,555 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:14,555 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:14,555 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,556 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,556 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:14,556 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:14,556 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:14,556 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:14,556 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:14,556 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742093_1269, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_31.json
2018-07-21T05:32:14,963 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_31.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:14,994 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:14,994 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742094_1270, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/17f7ef0370c448fe95ff6b7eca8ee0d8/32_descriptor.json
2018-07-21T05:32:15,000 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/17f7ef0370c448fe95ff6b7eca8ee0d8/32_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,003 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,003 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,004 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742095_1271, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/32_index.zip
2018-07-21T05:32:15,009 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/17f7ef0370c448fe95ff6b7eca8ee0d8/32_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,015 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,015 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742096_1272, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_32.json
2018-07-21T05:32:15,021 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_32.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:15,050 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,050 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742097_1273, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ca05889e95d488a94ffb72dcd2c8ec2/33_descriptor.json
2018-07-21T05:32:15,056 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ca05889e95d488a94ffb72dcd2c8ec2/33_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:15,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,059 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742098_1274, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/33_index.zip
2018-07-21T05:32:15,065 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ca05889e95d488a94ffb72dcd2c8ec2/33_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:15,069 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,070 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742099_1275, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_33.json
2018-07-21T05:32:15,075 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_33.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,109 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742100_1276, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d9bebaacd52d4f8ca50b9ae4d7476101/34_descriptor.json
2018-07-21T05:32:15,114 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d9bebaacd52d4f8ca50b9ae4d7476101/34_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,117 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,117 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,118 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,118 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742101_1277, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/34_index.zip
2018-07-21T05:32:15,123 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d9bebaacd52d4f8ca50b9ae4d7476101/34_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,128 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,128 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742102_1278, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_34.json
2018-07-21T05:32:15,358 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:15,539 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_34.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:15,569 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,569 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742103_1279, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0fb6c39fb2bc429db3305346fbeacfff/35_descriptor.json
2018-07-21T05:32:15,577 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0fb6c39fb2bc429db3305346fbeacfff/35_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,580 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,580 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:15,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:15,581 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,581 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742104_1280, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/35_index.zip
2018-07-21T05:32:15,587 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0fb6c39fb2bc429db3305346fbeacfff/35_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:15,593 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,593 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742105_1281, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_35.json
2018-07-21T05:32:15,599 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_35.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:15,629 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,629 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742106_1282, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/541905040f8d4d7ab09cd05c898af126/36_descriptor.json
2018-07-21T05:32:15,636 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/541905040f8d4d7ab09cd05c898af126/36_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,641 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,642 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,642 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742107_1283, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/36_index.zip
2018-07-21T05:32:15,654 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/541905040f8d4d7ab09cd05c898af126/36_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,661 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,661 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742108_1284, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_36.json
2018-07-21T05:32:15,667 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_36.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:15,699 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,700 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742109_1285, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c85642082b947f4a385db3fda8cd31e/37_descriptor.json
2018-07-21T05:32:15,705 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c85642082b947f4a385db3fda8cd31e/37_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:15,708 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:15,708 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742110_1286, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/37_index.zip 2018-07-21T05:32:15,714 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c85642082b947f4a385db3fda8cd31e/37_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:15,721 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:15,721 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742111_1287, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_37.json 2018-07-21T05:32:15,726 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_37.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:15,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:15,755 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742112_1288, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4e6adb67f264ed383af242c3eb69494/38_descriptor.json 2018-07-21T05:32:15,761 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4e6adb67f264ed383af242c3eb69494/38_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:15,764 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:15,764 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742113_1289, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/38_index.zip 2018-07-21T05:32:15,770 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4e6adb67f264ed383af242c3eb69494/38_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:15,774 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:15,774 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742114_1290, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_38.json 2018-07-21T05:32:15,779 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_38.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:15,806 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,806 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742115_1291, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/454bb18c63c84184af6ce12ebdb177b9/39_descriptor.json
2018-07-21T05:32:15,813 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/454bb18c63c84184af6ce12ebdb177b9/39_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,817 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:15,817 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,817 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,817 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,817 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:15,817 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,817 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742116_1292, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/39_index.zip
2018-07-21T05:32:15,823 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/454bb18c63c84184af6ce12ebdb177b9/39_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,828 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,829 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742117_1293, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_39.json
2018-07-21T05:32:15,834 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_39.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:15,868 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,868 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742118_1294, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d05de51456cd4688b7cbc945bf9904b5/40_descriptor.json
2018-07-21T05:32:15,877 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d05de51456cd4688b7cbc945bf9904b5/40_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:15,880 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,881 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742119_1295, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/40_index.zip
2018-07-21T05:32:15,887 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d05de51456cd4688b7cbc945bf9904b5/40_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,892 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,892 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742120_1296, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_40.json
2018-07-21T05:32:15,900 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_40.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:15,931 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,931 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742121_1297, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3779a538428d485384ea59fb3eb2eefd/41_descriptor.json
2018-07-21T05:32:15,938 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3779a538428d485384ea59fb3eb2eefd/41_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:15,941 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742122_1298, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/41_index.zip
2018-07-21T05:32:15,948 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3779a538428d485384ea59fb3eb2eefd/41_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,952 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:15,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:15,953 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742123_1299, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_41.json
2018-07-21T05:32:15,959 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_41.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:15,987 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,987 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742124_1300, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c24530e35b9416e847cdf3d7c4478d4/42_descriptor.json
2018-07-21T05:32:15,993 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c24530e35b9416e847cdf3d7c4478d4/42_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
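
[Editor's note] The staging paths repeating through this section follow a fixed per-segment layout: for each segment N the reducer writes N_descriptor.json and N_index.zip under intermediateSegmentDir, then one summary descriptor under segmentsDescriptorDir. The short sketch below just reconstructs those path shapes; the staging root and the HASH/INTERVAL/TIMESTAMP components are placeholders for the run-specific values visible in the log, and the layout is inferred from these paths rather than taken from Hive's source.

public class StagingLayout {
    public static void main(String[] args) {
        String staging = "/tmp/druidStagingDir/.staging-example"; // placeholder root
        String table = "default.druid_max_size_partition";
        int segment = 42; // matches the segment written in the entries above

        // one descriptor and one index file per segment under intermediateSegmentDir...
        System.out.println(staging + "/intermediateSegmentDir/" + table
                + "/HASH/" + segment + "_descriptor.json");
        System.out.println(staging + "/intermediateSegmentDir/" + table
                + "/INTERVAL/TIMESTAMP/" + segment + "_index.zip");
        // ...plus a per-segment summary descriptor under segmentsDescriptorDir
        System.out.println(staging + "/segmentsDescriptorDir/" + table
                + "_INTERVAL_TIMESTAMP_" + segment + ".json");
    }
}
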
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:15,996 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:15,996 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742125_1301, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/42_index.zip
2018-07-21T05:32:16,002 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c24530e35b9416e847cdf3d7c4478d4/42_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,008 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742126_1302, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_42.json
2018-07-21T05:32:16,013 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_42.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,038 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,038 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742127_1303, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/638bf6699098418992aeeef2a8c29a2f/43_descriptor.json
2018-07-21T05:32:16,044 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/638bf6699098418992aeeef2a8c29a2f/43_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,047 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742128_1304, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/43_index.zip
2018-07-21T05:32:16,053 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/638bf6699098418992aeeef2a8c29a2f/43_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,058 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,058 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742129_1305, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_43.json
2018-07-21T05:32:16,063 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_43.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,087 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,087 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742130_1306, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1a161bdd9e45469c96a550b533e83f8d/44_descriptor.json
2018-07-21T05:32:16,092 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1a161bdd9e45469c96a550b533e83f8d/44_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:16,095 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,095 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742131_1307, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/44_index.zip
2018-07-21T05:32:16,100 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1a161bdd9e45469c96a550b533e83f8d/44_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:16,105 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,105 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742132_1308, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_44.json
2018-07-21T05:32:16,110 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_44.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:16,137 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,137 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742133_1309, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a56ca5a92ab40abaacfc81c8c58755a/45_descriptor.json
2018-07-21T05:32:16,143 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a56ca5a92ab40abaacfc81c8c58755a/45_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:16,146 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,146 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742134_1310, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/45_index.zip
2018-07-21T05:32:16,151 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a56ca5a92ab40abaacfc81c8c58755a/45_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:16,156 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,156 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742135_1311, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_45.json
2018-07-21T05:32:16,161 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_45.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,184 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,184 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,184 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,184 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,184 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,184 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,185 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,185 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:16,185 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,185 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742136_1312, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/344e471711ad4292a2d704d2ab5922b7/46_descriptor.json
2018-07-21T05:32:16,191 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/344e471711ad4292a2d704d2ab5922b7/46_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:16,194 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,195 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742137_1313, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/46_index.zip
2018-07-21T05:32:16,202 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/344e471711ad4292a2d704d2ab5922b7/46_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,206 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:16,207 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,207 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742138_1314, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_46.json
2018-07-21T05:32:16,615 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_46.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:16,652 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,653 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742139_1315, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4ba328ec1004ab68fb025d8c7a1134a/47_descriptor.json
2018-07-21T05:32:16,658 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4ba328ec1004ab68fb025d8c7a1134a/47_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,661 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,661 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742140_1316, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/47_index.zip
2018-07-21T05:32:16,666 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4ba328ec1004ab68fb025d8c7a1134a/47_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,672 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742141_1317, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_47.json
2018-07-21T05:32:16,677 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_47.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:16,722 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:16,722 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742142_1318, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d91c7f8a5e0946d7a81eb05f2cf36262/48_descriptor.json
2018-07-21T05:32:16,729 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d91c7f8a5e0946d7a81eb05f2cf36262/48_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:16,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,732 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742143_1319, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/48_index.zip
2018-07-21T05:32:16,738 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d91c7f8a5e0946d7a81eb05f2cf36262/48_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,742 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:16,743 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,743 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742144_1320, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_48.json
2018-07-21T05:32:16,748 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_48.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:16,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:16,789 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742145_1321, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1931cceae2194159927aac560b2d01a8/49_descriptor.json
2018-07-21T05:32:17,207 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1931cceae2194159927aac560b2d01a8/49_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:17,210 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,210 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742146_1322, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/49_index.zip
2018-07-21T05:32:17,216 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1931cceae2194159927aac560b2d01a8/49_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:17,221 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:17,221 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742147_1323, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_49.json
2018-07-21T05:32:17,228 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_49.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:17,253 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742148_1324, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/62ca7fedf25b437bbf6d8c04a02f7f72/50_descriptor.json
2018-07-21T05:32:17,259 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/62ca7fedf25b437bbf6d8c04a02f7f72/50_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:17,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,262 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742149_1325, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/50_index.zip
2018-07-21T05:32:17,267 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/62ca7fedf25b437bbf6d8c04a02f7f72/50_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,272 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,272 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742150_1326, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_50.json
2018-07-21T05:32:17,280 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_50.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:17,306 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,306 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742151_1327, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/428af4e565364c38beb5c3d2956414cc/51_descriptor.json
2018-07-21T05:32:17,311 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/428af4e565364c38beb5c3d2956414cc/51_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:17,314 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,314 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742152_1328, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/51_index.zip
2018-07-21T05:32:17,319 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/428af4e565364c38beb5c3d2956414cc/51_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:17,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:17,324 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742153_1329, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_51.json
2018-07-21T05:32:17,329 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_51.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,351 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:17,351 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742154_1330, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2b8a3ef5bd94a5ea427fd2c7dfc86ca/52_descriptor.json
2018-07-21T05:32:17,356 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2b8a3ef5bd94a5ea427fd2c7dfc86ca/52_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,358 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:17,359 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,359 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742155_1331, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/52_index.zip
2018-07-21T05:32:17,363 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2b8a3ef5bd94a5ea427fd2c7dfc86ca/52_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:17,368 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,368 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742156_1332, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_52.json
2018-07-21T05:32:17,374 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_52.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:17,398 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,398 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742157_1333, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a5c34729f7864ba89dd8fbef120c7198/53_descriptor.json
2018-07-21T05:32:17,403 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a5c34729f7864ba89dd8fbef120c7198/53_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:17,406 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:17,406 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742158_1334, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/53_index.zip
2018-07-21T05:32:17,412 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a5c34729f7864ba89dd8fbef120c7198/53_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:17,417 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:17,417 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742159_1335, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_53.json
2018-07-21T05:32:17,825 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_53.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,851 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,851 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,852 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:17,852 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,852 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,852 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,852 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:17,852 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:17,852 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742160_1336, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0c253430fa064e7296181eb59b7d2788/54_descriptor.json
2018-07-21T05:32:17,860 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0c253430fa064e7296181eb59b7d2788/54_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:17,869 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,870 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742161_1337, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/54_index.zip
2018-07-21T05:32:17,883 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0c253430fa064e7296181eb59b7d2788/54_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:17,888 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,889 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742162_1338, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_54.json
2018-07-21T05:32:17,902 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_54.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:17,929 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:17,929 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742163_1339, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/984d69fc60d9444d82b4474cd5315066/55_descriptor.json
2018-07-21T05:32:18,337 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/984d69fc60d9444d82b4474cd5315066/55_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:18,341 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:18,341 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742164_1340, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/55_index.zip
2018-07-21T05:32:18,348 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/984d69fc60d9444d82b4474cd5315066/55_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,353 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,354 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742165_1341, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_55.json
2018-07-21T05:32:18,359 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_55.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,373 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:18,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,386 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742166_1342, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eac2b252c0cb42b2aab1882f873de4c0/56_descriptor.json
2018-07-21T05:32:18,393 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eac2b252c0cb42b2aab1882f873de4c0/56_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,396 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,396 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742167_1343, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/56_index.zip
2018-07-21T05:32:18,403 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eac2b252c0cb42b2aab1882f873de4c0/56_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:18,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:18,409 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742168_1344, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_56.json
2018-07-21T05:32:18,415 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_56.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,440 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,441 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742169_1345, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50c76a2f877b422996dcd0c936f3d175/57_descriptor.json
2018-07-21T05:32:18,448 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50c76a2f877b422996dcd0c936f3d175/57_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:18,453 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,453 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,453 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,453 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:18,453 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,453 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742170_1346, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/57_index.zip
2018-07-21T05:32:18,862 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50c76a2f877b422996dcd0c936f3d175/57_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:18,868 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742171_1347, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_57.json
2018-07-21T05:32:18,873 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_57.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,895 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,895 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,896 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:18,896 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:18,896 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,896 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,896 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:18,896 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,896 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742172_1348, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/efa4d25a8464407385bda8456d246102/58_descriptor.json
2018-07-21T05:32:18,901 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/efa4d25a8464407385bda8456d246102/58_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:18,904 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,904 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742173_1349, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/58_index.zip
2018-07-21T05:32:18,912 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/efa4d25a8464407385bda8456d246102/58_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:18,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,917 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742174_1350, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_58.json
2018-07-21T05:32:18,921 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_58.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,945 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,945 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742175_1351, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ffe62d0a72444bca9633b48028b03ebc/59_descriptor.json
2018-07-21T05:32:18,950 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ffe62d0a72444bca9633b48028b03ebc/59_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,952 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,952 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:18,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:18,953 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742176_1352, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/59_index.zip
2018-07-21T05:32:18,958 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ffe62d0a72444bca9633b48028b03ebc/59_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:18,962 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:18,963 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742177_1353, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_59.json
2018-07-21T05:32:18,968 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_59.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:18,991 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:18,991 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742178_1354, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a15167ca71d422eb91b4df9c98788f3/60_descriptor.json
2018-07-21T05:32:18,997 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a15167ca71d422eb91b4df9c98788f3/60_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,002 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:19,002 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742179_1355, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/60_index.zip
2018-07-21T05:32:19,008 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a15167ca71d422eb91b4df9c98788f3/60_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:19,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,013 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742180_1356, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_60.json
2018-07-21T05:32:19,019 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_60.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:19,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,049 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742181_1357, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/64dc3d79fa0e4a71a461069435f68703/61_descriptor.json
2018-07-21T05:32:19,055 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/64dc3d79fa0e4a71a461069435f68703/61_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:19,057 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:19,058 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742182_1358, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/61_index.zip
2018-07-21T05:32:19,063 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/64dc3d79fa0e4a71a461069435f68703/61_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:19,068 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,068 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742183_1359, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_61.json
2018-07-21T05:32:19,073 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_61.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,096 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742184_1360, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/71b6c075db7644b4b205a27d41cb777f/62_descriptor.json
2018-07-21T05:32:19,101 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/71b6c075db7644b4b205a27d41cb777f/62_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:19,104 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,104 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742185_1361, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/62_index.zip
2018-07-21T05:32:19,109 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/71b6c075db7644b4b205a27d41cb777f/62_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:19,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:19,114 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742186_1362, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_62.json
2018-07-21T05:32:19,237 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:32:19,300 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:32:19,522 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_62.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:19,548 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,548 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742187_1363, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2374a7e1c6f94f978ae0bb7070a59b16/63_descriptor.json
2018-07-21T05:32:19,557 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2374a7e1c6f94f978ae0bb7070a59b16/63_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,561 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742188_1364, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/63_index.zip
2018-07-21T05:32:19,566 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2374a7e1c6f94f978ae0bb7070a59b16/63_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:19,571 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,571 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742189_1365, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_63.json
2018-07-21T05:32:19,576 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_63.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:19,598 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:19,598 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742190_1366, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2fa4532f6da2489686255c51590737b3/64_descriptor.json 2018-07-21T05:32:19,604 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2fa4532f6da2489686255c51590737b3/64_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:19,607 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:19,607 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742191_1367, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/64_index.zip 2018-07-21T05:32:19,613 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2fa4532f6da2489686255c51590737b3/64_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:19,617 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,617 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:19,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:19,618 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742192_1368, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_64.json 2018-07-21T05:32:19,623 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_64.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:19,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:19,648 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742193_1369, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/69c01653ac374568abdfc655ba3e45fb/65_descriptor.json 2018-07-21T05:32:19,655 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/69c01653ac374568abdfc655ba3e45fb/65_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:19,662 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:19,662 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742194_1370, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/65_index.zip 2018-07-21T05:32:19,670 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/69c01653ac374568abdfc655ba3e45fb/65_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:19,677 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:19,678 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:19,678 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:19,678 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742195_1371, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_65.json 2018-07-21T05:32:19,687 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_65.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
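Each "BLOCK* allocate ... replicas=..." / "DIR* completeFile: ..." pair above records one staging file being allocated with three replicas and then closed by the reducer's DFSClient. A small, hypothetical Java helper for pulling the block id, replica set, and path out of the allocate records when skimming a log like this one (the regex is written against the record shape seen above, nothing more):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class AllocateLineParser {
    // Matches the payload of records such as:
    // "hdfs.StateChange: BLOCK* allocate blk_1073742188_1364,
    //  replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /path/to/file"
    private static final Pattern ALLOCATE =
            Pattern.compile("BLOCK\\* allocate (blk_\\d+_\\d+), replicas=(.+?) for (\\S+)");

    public static void main(String[] args) {
        String record = "BLOCK* allocate blk_1073742188_1364, "
                + "replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /tmp/example";
        Matcher m = ALLOCATE.matcher(record);
        if (m.find()) {
            System.out.println("block    = " + m.group(1)); // blk_1073742188_1364
            System.out.println("replicas = " + m.group(2)); // the three datanode addresses
            System.out.println("path     = " + m.group(3)); // the staging file path
        }
    }
}

Fed the records above, a helper like this makes it easy to confirm that every block lands on exactly three of the four datanodes and that block ids increase monotonically across the run.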
2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:19,711 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,711 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742196_1372, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eb253e0262b24c9496ddd3344459eba2/66_descriptor.json
2018-07-21T05:32:19,717 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eb253e0262b24c9496ddd3344459eba2/66_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:19,720 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:19,720 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742197_1373, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/66_index.zip
2018-07-21T05:32:19,729 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eb253e0262b24c9496ddd3344459eba2/66_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:19,737 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,737 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742198_1374, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_66.json
2018-07-21T05:32:19,743 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_66.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:19,774 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:19,774 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742199_1375, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5f466137bf33402bb94aff8efd9ffd2b/67_descriptor.json
2018-07-21T05:32:19,782 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5f466137bf33402bb94aff8efd9ffd2b/67_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,786 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,786 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:19,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:19,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:19,787 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742200_1376, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/67_index.zip
2018-07-21T05:32:19,799 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5f466137bf33402bb94aff8efd9ffd2b/67_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:19,810 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:19,810 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742201_1377, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_67.json
2018-07-21T05:32:20,218 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_67.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:20,247 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:20,248 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742202_1378, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0151d1adae214797b3f17f42be8a3e71/68_descriptor.json
2018-07-21T05:32:20,253 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0151d1adae214797b3f17f42be8a3e71/68_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,259 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,259 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742203_1379, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/68_index.zip
2018-07-21T05:32:20,264 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0151d1adae214797b3f17f42be8a3e71/68_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:20,269 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:20,270 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742204_1380, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_68.json
2018-07-21T05:32:20,275 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_68.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
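For every segment sequence number N, the reducer writes three staging files: intermediateSegmentDir/<datasource>/<hash>/N_descriptor.json, intermediateSegmentDir/<datasource>/<interval>/<version>/N_index.zip, and one per-segment JSON in segmentsDescriptorDir. A hypothetical Java helper that reconstructs those paths; the staging root, datasource, interval, and version strings are copied verbatim from the records above, while the per-segment hash directory is generated at run time and appears here only as a placeholder:

final class DruidStagingPaths {
    // Constants observed in the log records above; the helper itself is illustrative.
    static final String STAGING =
            "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/"
            + ".staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3";
    static final String DATASOURCE = "default.druid_max_size_partition";
    static final String INTERVAL_AND_VERSION =
            "19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00";

    // e.g. .../intermediateSegmentDir/<datasource>/<hash>/68_descriptor.json
    static String descriptor(String hashDir, int n) {
        return STAGING + "/intermediateSegmentDir/" + DATASOURCE + "/" + hashDir + "/" + n + "_descriptor.json";
    }

    // e.g. .../intermediateSegmentDir/<datasource>/<interval>/<version>/68_index.zip
    static String indexZip(int n) {
        return STAGING + "/intermediateSegmentDir/" + DATASOURCE + "/" + INTERVAL_AND_VERSION + "/" + n + "_index.zip";
    }

    // e.g. .../segmentsDescriptorDir/default.druid_max_size_partition_..._68.json
    static String segmentsDescriptor(int n) {
        return STAGING + "/segmentsDescriptorDir/" + DATASOURCE
                + "_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_" + n + ".json";
    }

    public static void main(String[] args) {
        // Hash dirs are per-segment, e.g. 0151d1adae214797b3f17f42be8a3e71 for segment 68 above.
        System.out.println(descriptor("<hash>", 68));
        System.out.println(indexZip(68));
        System.out.println(segmentsDescriptor(68));
    }
}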
2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:20,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:20,297 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742205_1381, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c7464fe074cf44a197a0fa2661c2f709/69_descriptor.json 2018-07-21T05:32:20,704 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c7464fe074cf44a197a0fa2661c2f709/69_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,707 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,707 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,708 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:20,708 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,708 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,708 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:20,708 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:20,708 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:20,708 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742206_1382, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/69_index.zip 2018-07-21T05:32:20,713 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c7464fe074cf44a197a0fa2661c2f709/69_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:20,718 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:20,719 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742207_1383, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_69.json 2018-07-21T05:32:20,724 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_69.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:20,748 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:20,748 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742208_1384, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fe8127fe4d6849868fb131c6c73194ad/70_descriptor.json 2018-07-21T05:32:20,756 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fe8127fe4d6849868fb131c6c73194ad/70_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,759 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,759 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,759 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:20,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:20,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:20,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:20,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,760 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742209_1385, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/70_index.zip 2018-07-21T05:32:20,765 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fe8127fe4d6849868fb131c6c73194ad/70_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:20,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:20,771 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,771 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742210_1386, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_70.json 2018-07-21T05:32:20,776 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_70.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:20,798 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:20,798 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742211_1387, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6675d372a7b463dadc433f69e823646/71_descriptor.json 2018-07-21T05:32:20,803 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6675d372a7b463dadc433f69e823646/71_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:20,805 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:20,805 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742212_1388, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/71_index.zip 2018-07-21T05:32:20,810 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6675d372a7b463dadc433f69e823646/71_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:20,814 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:20,814 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742213_1389, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_71.json 2018-07-21T05:32:20,818 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_71.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:20,839 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:20,839 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742214_1390, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4bec0d73cb8f4d028ebec116d9bfcbee/72_descriptor.json 2018-07-21T05:32:20,844 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4bec0d73cb8f4d028ebec116d9bfcbee/72_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:20,846 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,846 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742215_1391, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/72_index.zip
2018-07-21T05:32:20,853 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4bec0d73cb8f4d028ebec116d9bfcbee/72_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:20,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:20,857 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742216_1392, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_72.json
2018-07-21T05:32:20,862 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_72.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:20,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:20,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,883 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742217_1393, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a9ae73efad734d95bb90c4852719c6f8/73_descriptor.json
2018-07-21T05:32:20,892 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a9ae73efad734d95bb90c4852719c6f8/73_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,894 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,894 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,895 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:20,895 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,895 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,895 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,895 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:20,895 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:20,895 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742218_1394, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/73_index.zip
2018-07-21T05:32:20,900 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a9ae73efad734d95bb90c4852719c6f8/73_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,907 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,907 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,907 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:20,907 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,908 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,908 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,908 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:20,908 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,908 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742219_1395, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_73.json
2018-07-21T05:32:20,913 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_73.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:20,936 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:20,936 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742220_1396, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4e21334a5b704ff0a3aab6f2445a03a5/74_descriptor.json
2018-07-21T05:32:20,941 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4e21334a5b704ff0a3aab6f2445a03a5/74_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
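Each completeFile entry names its writer as DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30, so this whole burst of segment files is being closed by a single reduce attempt, and each allocate entry carries a block ID of the form blk_<id>_<generationStamp>. Below is a hedged sketch of pulling those fields apart for log forensics; the field labels are mine, and this is plain string handling, not a Hadoop API.

public class IdFieldsSketch {
    public static void main(String[] args) {
        // Task attempt embedded in the DFSClient name above; the labels
        // are illustrative, not an official Hadoop parser.
        String attempt = "attempt_15321756062111_0001_r_000000_0";
        String[] a = attempt.split("_");
        String taskType = "r".equals(a[3]) ? "reduce" : "map";
        System.out.printf("job_%s_%s, %s task %s, attempt #%s%n",
                a[1], a[2], taskType, a[4], a[5]);

        // Block IDs pair a numeric ID with a generation stamp.
        String blk = "blk_1073742220_1396";
        String[] b = blk.split("_");
        System.out.println("blockId=" + b[1] + ", generationStamp=" + b[2]);
    }
}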
2018-07-21T05:32:20,944 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:20,944 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742221_1397, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/74_index.zip
2018-07-21T05:32:20,949 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4e21334a5b704ff0a3aab6f2445a03a5/74_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:20,953 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,954 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742222_1398, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_74.json
2018-07-21T05:32:20,959 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_74.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:20,986 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,986 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742223_1399, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e4d342dbee949dcaa93f59aa2fca09c/75_descriptor.json
2018-07-21T05:32:20,991 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e4d342dbee949dcaa93f59aa2fca09c/75_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:20,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:20,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:20,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:20,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:20,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:20,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:20,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:20,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:20,994 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742224_1400, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/75_index.zip
2018-07-21T05:32:20,998 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e4d342dbee949dcaa93f59aa2fca09c/75_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:21,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,003 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742225_1401, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_75.json
2018-07-21T05:32:21,007 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_75.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:21,027 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,027 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742226_1402, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38e5662e26ba47b09a2a96e95a686211/76_descriptor.json
2018-07-21T05:32:21,032 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38e5662e26ba47b09a2a96e95a686211/76_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:21,035 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,035 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742227_1403, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/76_index.zip
2018-07-21T05:32:21,039 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38e5662e26ba47b09a2a96e95a686211/76_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,043 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:21,044 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,044 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742228_1404, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_76.json
2018-07-21T05:32:21,049 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_76.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
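By this point the per-segment write pattern is clear: each finished partition produces an NN_descriptor.json and NN_index.zip under a hash-named subdirectory of intermediateSegmentDir, an NN_index.zip under the interval/version path, and a descriptor copy in segmentsDescriptorDir whose file name concatenates datasource, interval start and end, version timestamp, and partition number. Below is a sketch assembling that descriptor name from its parts; buildDescriptorName is a hypothetical helper for illustration only, not Hive or Druid code.

public class SegmentNameSketch {
    // Hypothetical helper: rebuild the file-name pattern seen in the
    // segmentsDescriptorDir entries above. Illustrative only.
    static String buildDescriptorName(String dataSource, String intervalStart,
                                      String intervalEnd, String version,
                                      int partitionNum) {
        return String.join("_",
                dataSource, intervalStart, intervalEnd, version,
                String.valueOf(partitionNum)) + ".json";
    }

    public static void main(String[] args) {
        // Reproduces the name allocated for partition 76 above.
        String name = buildDescriptorName(
                "default.druid_max_size_partition",
                "1969-12-31T230000.000Z",
                "1970-01-01T000000.000Z",
                "2018-07-21T053159.547-0700",
                76);
        System.out.println(name);
    }
}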
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:21,069 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,069 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742229_1405, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/612989484fe141d0985a2e616e13e0f1/77_descriptor.json
2018-07-21T05:32:21,074 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/612989484fe141d0985a2e616e13e0f1/77_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:21,076 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,077 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742230_1406, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/77_index.zip
2018-07-21T05:32:21,081 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/612989484fe141d0985a2e616e13e0f1/77_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,085 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,085 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,085 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:21,086 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:21,086 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742231_1407, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_77.json
2018-07-21T05:32:21,090 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_77.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:21,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,115 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742232_1408, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90e55fa4bb7e44c5917c5986f463a9db/78_descriptor.json
2018-07-21T05:32:21,119 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90e55fa4bb7e44c5917c5986f463a9db/78_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:21,122 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,122 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742233_1409, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/78_index.zip
2018-07-21T05:32:21,127 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90e55fa4bb7e44c5917c5986f463a9db/78_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:21,131 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,131 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742234_1410, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_78.json
2018-07-21T05:32:21,135 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_78.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:21,158 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,158 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742235_1411, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c9cd21686a774cf2a78a6d5c9eff37aa/79_descriptor.json
2018-07-21T05:32:21,164 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c9cd21686a774cf2a78a6d5c9eff37aa/79_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:21,167 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,167 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742236_1412, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/79_index.zip
2018-07-21T05:32:21,172 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c9cd21686a774cf2a78a6d5c9eff37aa/79_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:21,177 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,177 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742237_1413, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_79.json
2018-07-21T05:32:21,182 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_79.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:21,207 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,207 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742238_1414, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a635fb7dfd30442b8f698f2fe4198682/80_descriptor.json
2018-07-21T05:32:21,213 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a635fb7dfd30442b8f698f2fe4198682/80_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:21,216 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:21,216 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742239_1415, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/80_index.zip
2018-07-21T05:32:21,389 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:21,623 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a635fb7dfd30442b8f698f2fe4198682/80_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:21,628 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,628 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:21,628 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:21,629 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:21,629 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:21,629 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
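One non-HDFS line breaks the stream above: Hive's monitor thread reports vertex progress as "Map 1: 1/1 Reducer 2: 0(+1)/1", i.e. the map vertex is complete and the single reducer, the task producing all of these segment files, is still running (0 done, 1 in flight, 1 total). Below is a small sketch that decodes the done(+running)/total shape; it is illustrative parsing only, not Hive's RenderStrategy code.

import java.util.regex.*;

public class ProgressSketch {
    public static void main(String[] args) {
        // The progress line logged by RenderStrategy$LogToFileFunction above.
        String line = "Map 1: 1/1 Reducer 2: 0(+1)/1";
        // Groups: vertex name, completed count, optional running count, total.
        Pattern p = Pattern.compile("(\\w+ \\d+): (\\d+)(?:\\(\\+(\\d+)\\))?/(\\d+)");
        Matcher m = p.matcher(line);
        while (m.find()) {
            int done    = Integer.parseInt(m.group(2));
            int running = m.group(3) == null ? 0 : Integer.parseInt(m.group(3));
            int total   = Integer.parseInt(m.group(4));
            System.out.printf("%s: %d done, %d running, %d total%n",
                    m.group(1), done, running, total);
        }
    }
}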
2018-07-21T05:32:21,629 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:21,629 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:21,629 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742240_1416, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_80.json
2018-07-21T05:32:22,036 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_80.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,058 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,059 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742241_1417, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/04af60355a3d4d1d87e7cb83a1acef88/81_descriptor.json
2018-07-21T05:32:22,063 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/04af60355a3d4d1d87e7cb83a1acef88/81_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,066 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742242_1418, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/81_index.zip
2018-07-21T05:32:22,071 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/04af60355a3d4d1d87e7cb83a1acef88/81_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:22,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,075 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742243_1419, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_81.json
2018-07-21T05:32:22,080 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_81.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:22,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,109 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742244_1420, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/624a485cf4db4ad4b3371896df0b2881/82_descriptor.json
2018-07-21T05:32:22,518 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/624a485cf4db4ad4b3371896df0b2881/82_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,521 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,521 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742245_1421, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/82_index.zip
2018-07-21T05:32:22,526 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/624a485cf4db4ad4b3371896df0b2881/82_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,530 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,530 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742246_1422, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_82.json
2018-07-21T05:32:22,535 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_82.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,558 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,559 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742247_1423, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/762de6df456c4d4eb44ee034dd18275f/83_descriptor.json
2018-07-21T05:32:22,564 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/762de6df456c4d4eb44ee034dd18275f/83_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:22,566 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,566 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742248_1424, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/83_index.zip
2018-07-21T05:32:22,571 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/762de6df456c4d4eb44ee034dd18275f/83_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:22,575 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,575 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742249_1425, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_83.json
2018-07-21T05:32:22,581 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_83.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:22,601 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,601 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742250_1426, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fd82bd9e924745dd8fde4f01a3e7c854/84_descriptor.json
2018-07-21T05:32:22,606 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fd82bd9e924745dd8fde4f01a3e7c854/84_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,609 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,609 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742251_1427, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/84_index.zip
2018-07-21T05:32:22,614 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fd82bd9e924745dd8fde4f01a3e7c854/84_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:22,618 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,618 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742252_1428, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_84.json
2018-07-21T05:32:22,623 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_84.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,649 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742253_1429, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c0ed2932c4947b5aa8a0409bcbaf04b/85_descriptor.json
2018-07-21T05:32:22,656 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c0ed2932c4947b5aa8a0409bcbaf04b/85_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,659 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,659 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742254_1430, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/85_index.zip
2018-07-21T05:32:22,665 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c0ed2932c4947b5aa8a0409bcbaf04b/85_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,670 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742255_1431, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_85.json
2018-07-21T05:32:22,676 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_85.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:22,698 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,698 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742256_1432, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d02567a428ac401fbe6d5cd94ad55d0f/86_descriptor.json
2018-07-21T05:32:22,703 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d02567a428ac401fbe6d5cd94ad55d0f/86_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:22,706 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,706 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742257_1433, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/86_index.zip
2018-07-21T05:32:22,713 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d02567a428ac401fbe6d5cd94ad55d0f/86_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,721 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,721 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742258_1434, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_86.json
2018-07-21T05:32:22,726 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_86.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,748 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,748 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,748 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,748 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,748 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,748 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,749 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,749 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,749 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,749 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742259_1435, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33bda2eae0e040b38886ed9adaa906b8/87_descriptor.json
2018-07-21T05:32:22,754 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33bda2eae0e040b38886ed9adaa906b8/87_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,757 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,758 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742260_1436, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/87_index.zip
2018-07-21T05:32:22,763 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33bda2eae0e040b38886ed9adaa906b8/87_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:22,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,769 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742261_1437, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_87.json
2018-07-21T05:32:22,776 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_87.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:22,798 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,798 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742262_1438, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5a4d2f637714b1a9ec1b5ee7e3e37a4/88_descriptor.json
2018-07-21T05:32:22,803 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5a4d2f637714b1a9ec1b5ee7e3e37a4/88_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:22,806 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,807 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742263_1439, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/88_index.zip
2018-07-21T05:32:22,812 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5a4d2f637714b1a9ec1b5ee7e3e37a4/88_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,816 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,816 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742264_1440, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_88.json
2018-07-21T05:32:22,826 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_88.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:22,857 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:22,857 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742265_1441, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/648bce312f4d44e69be578a70b7c3275/89_descriptor.json
2018-07-21T05:32:22,862 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/648bce312f4d44e69be578a70b7c3275/89_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:22,864 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:22,864 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,864 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:22,864 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:22,865 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,865 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,865 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:22,865 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,865 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:22,865 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742266_1442, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/89_index.zip 2018-07-21T05:32:22,870 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/648bce312f4d44e69be578a70b7c3275/89_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:22,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,874 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742267_1443, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_89.json 2018-07-21T05:32:22,878 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_89.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:22,897 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,897 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742268_1444, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff31576b9ea74bf78aa07ab4a72a474f/90_descriptor.json 2018-07-21T05:32:22,902 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff31576b9ea74bf78aa07ab4a72a474f/90_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,904 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,904 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,905 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:22,905 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:22,905 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,905 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,905 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:22,905 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:22,905 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742269_1445, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/90_index.zip 2018-07-21T05:32:22,910 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff31576b9ea74bf78aa07ab4a72a474f/90_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:22,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,914 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742270_1446, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_90.json 2018-07-21T05:32:22,919 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_90.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:22,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,941 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:22,941 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742271_1447, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9464ea290a334e388545401ce4520a67/91_descriptor.json 2018-07-21T05:32:22,947 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9464ea290a334e388545401ce4520a67/91_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:22,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:22,950 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742272_1448, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/91_index.zip 2018-07-21T05:32:22,954 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9464ea290a334e388545401ce4520a67/91_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:22,958 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:22,958 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742273_1449, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_91.json 2018-07-21T05:32:22,963 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_91.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,983 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,984 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742274_1450, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9570ad53e12d4e558eabeefed4adb5af/92_descriptor.json 2018-07-21T05:32:22,989 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9570ad53e12d4e558eabeefed4adb5af/92_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:22,992 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:22,992 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742275_1451, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/92_index.zip 2018-07-21T05:32:22,996 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9570ad53e12d4e558eabeefed4adb5af/92_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:23,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,001 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742276_1452, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_92.json 2018-07-21T05:32:23,006 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_92.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:23,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,027 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742277_1453, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5428224e00bc4a83b64ed993e05e42e2/93_descriptor.json 2018-07-21T05:32:23,031 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5428224e00bc4a83b64ed993e05e42e2/93_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,033 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,034 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742278_1454, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/93_index.zip 2018-07-21T05:32:23,038 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5428224e00bc4a83b64ed993e05e42e2/93_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,042 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,042 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:23,042 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:23,043 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,043 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,043 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,043 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,043 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:23,043 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,043 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742279_1455, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_93.json 2018-07-21T05:32:23,047 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_93.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:23,066 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,066 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742280_1456, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db9faeffb6384b5c9de55be7ef82fc01/94_descriptor.json 2018-07-21T05:32:23,070 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db9faeffb6384b5c9de55be7ef82fc01/94_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:23,073 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,073 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742281_1457, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/94_index.zip 2018-07-21T05:32:23,077 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db9faeffb6384b5c9de55be7ef82fc01/94_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:23,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,082 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742282_1458, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_94.json 2018-07-21T05:32:23,086 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_94.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,107 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,107 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742283_1459, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8ee85c6291024c7ba93d4d7e7ae0d0bb/95_descriptor.json 2018-07-21T05:32:23,112 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8ee85c6291024c7ba93d4d7e7ae0d0bb/95_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:23,115 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,115 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742284_1460, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/95_index.zip 2018-07-21T05:32:23,119 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8ee85c6291024c7ba93d4d7e7ae0d0bb/95_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:23,124 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,124 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742285_1461, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_95.json 2018-07-21T05:32:23,134 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_95.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,162 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,162 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742286_1462, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8fc79c3963041f39da78762e93454fd/96_descriptor.json 2018-07-21T05:32:23,167 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8fc79c3963041f39da78762e93454fd/96_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:23,170 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,170 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742287_1463, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/96_index.zip 2018-07-21T05:32:23,181 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8fc79c3963041f39da78762e93454fd/96_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,185 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742288_1464, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_96.json
2018-07-21T05:32:23,190 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_96.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:23,209 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,210 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742289_1465, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9aab10a12f3f4ed288bf81fef92af401/97_descriptor.json
2018-07-21T05:32:23,217 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9aab10a12f3f4ed288bf81fef92af401/97_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:23,220 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,221 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742290_1466, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/97_index.zip
2018-07-21T05:32:23,258 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9aab10a12f3f4ed288bf81fef92af401/97_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,262 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742291_1467, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_97.json
2018-07-21T05:32:23,267 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_97.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,287 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,287 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742292_1468, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8046779b09c47bc8ab34fd899cf01ac/98_descriptor.json
2018-07-21T05:32:23,292 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8046779b09c47bc8ab34fd899cf01ac/98_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:23,294 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,295 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742293_1469, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/98_index.zip
2018-07-21T05:32:23,300 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8046779b09c47bc8ab34fd899cf01ac/98_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,304 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,304 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742294_1470, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_98.json
2018-07-21T05:32:23,310 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_98.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,330 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,330 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742295_1471, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ed159d81f9714cd9bacd4a352e7f44fe/99_descriptor.json
2018-07-21T05:32:23,336 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ed159d81f9714cd9bacd4a352e7f44fe/99_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,338 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,338 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,339 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,339 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742296_1472, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/99_index.zip
2018-07-21T05:32:23,344 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ed159d81f9714cd9bacd4a352e7f44fe/99_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,348 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742297_1473, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_99.json
2018-07-21T05:32:23,353 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_99.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,375 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,375 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742298_1474, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/638be97ca66e47c3b16d5433a33f8ea8/100_descriptor.json
2018-07-21T05:32:23,386 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/638be97ca66e47c3b16d5433a33f8ea8/100_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
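Each allocate/completeFile pair in these entries is driven from the client side: the reducer's DFSClient creates a staging file, the NameNode hands out a block ("BLOCK* allocate") as data is written, and closing the stream finalizes the file ("DIR* completeFile ... is closed by DFSClient_..."). A minimal client-side counterpart using the standard org.apache.hadoop.fs API; the path and payload are hypothetical, not taken from this test:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SmallHdfsWrite {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // picks up core-site.xml/hdfs-site.xml
        try (FileSystem fs = FileSystem.get(conf);
             FSDataOutputStream out = fs.create(new Path("/tmp/example_descriptor.json"))) {
            // Writing triggers block allocation on the NameNode as needed;
            // these small descriptor files fit in a single block.
            out.write("{\"example\": true}".getBytes(StandardCharsets.UTF_8));
        } // close() is what produces the "DIR* completeFile" entry.
    }
}

Because every descriptor and index file here is far smaller than a block, each one costs exactly one allocate and one completeFile round trip, which is why the two entry types alternate so regularly through this stretch of the log.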
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,390 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,390 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742299_1475, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/100_index.zip
2018-07-21T05:32:23,396 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/638be97ca66e47c3b16d5433a33f8ea8/100_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:23,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,401 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742300_1476, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_100.json
2018-07-21T05:32:23,406 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_100.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:23,428 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,428 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742301_1477, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/27f6e2af2c3c42f5a15cf9a005180f87/101_descriptor.json
2018-07-21T05:32:23,432 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/27f6e2af2c3c42f5a15cf9a005180f87/101_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,435 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,435 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742302_1478, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/101_index.zip
2018-07-21T05:32:23,439 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/27f6e2af2c3c42f5a15cf9a005180f87/101_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,443 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,443 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:23,444 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,444 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742303_1479, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_101.json
2018-07-21T05:32:23,449 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_101.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,471 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,471 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:23,472 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,472 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742304_1480, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3e3adade71384441ba1f1376c383c070/102_descriptor.json
2018-07-21T05:32:23,477 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3e3adade71384441ba1f1376c383c070/102_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,479 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,479 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742305_1481, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/102_index.zip
2018-07-21T05:32:23,484 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3e3adade71384441ba1f1376c383c070/102_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,489 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,489 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742306_1482, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_102.json
2018-07-21T05:32:23,494 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_102.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
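The staging paths in these entries follow one pattern per Druid partition: an N_descriptor.json and N_index.zip under intermediateSegmentDir (the descriptor in a hash-named working subdirectory, the index zip under the segment interval and version), plus a per-segment descriptor under segmentsDescriptorDir. The following sketch just reproduces the naming as read off this log; it is a reading aid reconstructed from the paths above, not the Hive/Druid storage handler's actual code, and the <hash> placeholder stands in for the per-partition working directory name that the job generates:

public class DruidStagingNames {
    public static void main(String[] args) {
        String stagingDir = ".staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3";
        String dataSource = "default.druid_max_size_partition";
        String interval = "1969-12-31T230000.000Z_1970-01-01T000000.000Z";
        String version = "2018-07-21T053159.547-0700";
        int partition = 102; // the counter seen climbing 96..106 through this stretch

        // Per-partition intermediate outputs (hash-named working subdir elided):
        System.out.printf("%s/intermediateSegmentDir/%s/<hash>/%d_descriptor.json%n",
                stagingDir, dataSource, partition);
        System.out.printf("%s/intermediateSegmentDir/%s/<hash>/%d_index.zip%n",
                stagingDir, dataSource, partition);
        // Segment descriptor written for the later handoff/metadata step:
        System.out.printf("%s/segmentsDescriptorDir/%s_%s_%s_%d.json%n",
                stagingDir, dataSource, interval, version, partition);
    }
}

Read against the log, each partition therefore costs three small HDFS files, which matches the steady three allocate/completeFile pairs per partition number visible above.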
2018-07-21T05:32:23,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,518 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,518 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,518 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,518 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,518 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,518 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742307_1483, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e036d082c0a642eeb657df2624889915/103_descriptor.json
2018-07-21T05:32:23,523 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e036d082c0a642eeb657df2624889915/103_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,525 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,526 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742308_1484, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/103_index.zip
2018-07-21T05:32:23,531 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e036d082c0a642eeb657df2624889915/103_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,534 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742309_1485, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_103.json
2018-07-21T05:32:23,539 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_103.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:23,569 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,569 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742310_1486, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8bb2df58612486cb9d00a8b0e1617a9/104_descriptor.json
2018-07-21T05:32:23,574 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8bb2df58612486cb9d00a8b0e1617a9/104_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,579 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,579 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742311_1487, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/104_index.zip
2018-07-21T05:32:23,584 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8bb2df58612486cb9d00a8b0e1617a9/104_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,593 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,593 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742312_1488, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_104.json
2018-07-21T05:32:23,598 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_104.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:23,617 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,617 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742313_1489, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/22ab83e835984613b424ae8cb9125ea0/105_descriptor.json
2018-07-21T05:32:23,622 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/22ab83e835984613b424ae8cb9125ea0/105_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:23,624 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,625 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742314_1490, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/105_index.zip
2018-07-21T05:32:23,629 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/22ab83e835984613b424ae8cb9125ea0/105_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:23,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,633 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742315_1491, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_105.json
2018-07-21T05:32:23,638 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_105.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:23,658 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:23,658 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742316_1492, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/27fe9cec4a0c4e2694ec34a84cc25065/106_descriptor.json
2018-07-21T05:32:23,663 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/27fe9cec4a0c4e2694ec34a84cc25065/106_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:23,666 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,666 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742317_1493, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/106_index.zip 2018-07-21T05:32:23,675 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/27fe9cec4a0c4e2694ec34a84cc25065/106_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:23,680 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,680 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742318_1494, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_106.json 2018-07-21T05:32:23,686 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_106.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:23,707 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,707 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742319_1495, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f6f1241fd18e4bbbbc0e57b10e22755a/107_descriptor.json 2018-07-21T05:32:23,713 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f6f1241fd18e4bbbbc0e57b10e22755a/107_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,721 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,721 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742320_1496, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/107_index.zip 2018-07-21T05:32:23,726 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f6f1241fd18e4bbbbc0e57b10e22755a/107_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:23,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,731 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742321_1497, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_107.json 2018-07-21T05:32:23,738 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_107.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:23,760 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:23,760 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742322_1498, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d166d9c12c054565b32921ad905037e3/108_descriptor.json 2018-07-21T05:32:23,766 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d166d9c12c054565b32921ad905037e3/108_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:23,771 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:23,772 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742323_1499, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/108_index.zip 2018-07-21T05:32:24,178 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d166d9c12c054565b32921ad905037e3/108_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:24,183 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,184 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742324_1500, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_108.json 2018-07-21T05:32:24,189 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_108.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,214 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,214 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,214 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,215 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,215 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,215 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,215 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:24,215 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,215 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,215 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742325_1501, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddf1e55401e84292840363ddb313a77d/109_descriptor.json 2018-07-21T05:32:24,220 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddf1e55401e84292840363ddb313a77d/109_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:24,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,229 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742326_1502, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/109_index.zip 2018-07-21T05:32:24,234 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddf1e55401e84292840363ddb313a77d/109_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:24,239 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,239 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742327_1503, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_109.json 2018-07-21T05:32:24,245 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_109.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,265 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,265 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742328_1504, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8877841c20e4415d86dee84668b2af74/110_descriptor.json 2018-07-21T05:32:24,270 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8877841c20e4415d86dee84668b2af74/110_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,272 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,273 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742329_1505, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/110_index.zip 2018-07-21T05:32:24,287 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8877841c20e4415d86dee84668b2af74/110_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,292 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,292 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742330_1506, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_110.json 2018-07-21T05:32:24,297 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_110.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:24,322 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,322 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742331_1507, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae9d31b86fac48eabbe2140e1c2b6d00/111_descriptor.json 2018-07-21T05:32:24,327 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae9d31b86fac48eabbe2140e1c2b6d00/111_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:24,330 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,330 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742332_1508, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/111_index.zip 2018-07-21T05:32:24,336 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae9d31b86fac48eabbe2140e1c2b6d00/111_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,340 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,340 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742333_1509, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_111.json 2018-07-21T05:32:24,345 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_111.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,364 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,365 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,365 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742334_1510, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8f7a75625812463ca1e78fa472ebf704/112_descriptor.json 2018-07-21T05:32:24,370 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8f7a75625812463ca1e78fa472ebf704/112_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,373 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,373 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742335_1511, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/112_index.zip 2018-07-21T05:32:24,378 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8f7a75625812463ca1e78fa472ebf704/112_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,384 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,385 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742336_1512, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_112.json 2018-07-21T05:32:24,393 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_112.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,402 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1 2018-07-21T05:32:24,418 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,418 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,418 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,418 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,418 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,418 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,419 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:24,419 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,419 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742337_1513, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9d496b0c5267402cb427b813946902a5/113_descriptor.json 2018-07-21T05:32:24,425 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9d496b0c5267402cb427b813946902a5/113_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:24,428 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,428 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742338_1514, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/113_index.zip 2018-07-21T05:32:24,433 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9d496b0c5267402cb427b813946902a5/113_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,437 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,437 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742339_1515, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_113.json 2018-07-21T05:32:24,442 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_113.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,461 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,461 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742340_1516, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5361e9b3e1f64b0a9ff7518f1b52a5ce/114_descriptor.json 2018-07-21T05:32:24,466 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5361e9b3e1f64b0a9ff7518f1b52a5ce/114_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:24,469 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,470 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742341_1517, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/114_index.zip 2018-07-21T05:32:24,474 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5361e9b3e1f64b0a9ff7518f1b52a5ce/114_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,477 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,478 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742342_1518, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_114.json 2018-07-21T05:32:24,482 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_114.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,501 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742343_1519, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5778365ca7f54c4c8999b068a58886cf/115_descriptor.json 2018-07-21T05:32:24,506 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5778365ca7f54c4c8999b068a58886cf/115_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,509 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,509 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742344_1520, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/115_index.zip 2018-07-21T05:32:24,513 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5778365ca7f54c4c8999b068a58886cf/115_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,519 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,519 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,519 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,519 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,519 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,519 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,520 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:24,520 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,520 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742345_1521, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_115.json 2018-07-21T05:32:24,524 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_115.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:24,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,542 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742346_1522, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/72cee3f45cc64ac998426f85af1de411/116_descriptor.json 2018-07-21T05:32:24,547 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/72cee3f45cc64ac998426f85af1de411/116_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,552 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,552 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742347_1523, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/116_index.zip 2018-07-21T05:32:24,557 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/72cee3f45cc64ac998426f85af1de411/116_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,561 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,561 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742348_1524, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_116.json 2018-07-21T05:32:24,565 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_116.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:24,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,587 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742349_1525, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f268844f42f4f3eb72c6f07209bf96a/117_descriptor.json 2018-07-21T05:32:24,596 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f268844f42f4f3eb72c6f07209bf96a/117_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:24,599 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,599 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742350_1526, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/117_index.zip 2018-07-21T05:32:24,604 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f268844f42f4f3eb72c6f07209bf96a/117_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,608 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,609 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,609 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742351_1527, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_117.json 2018-07-21T05:32:24,614 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_117.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:24,632 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,632 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742352_1528, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f788ac98c604097b2f394732513ce15/118_descriptor.json 2018-07-21T05:32:24,638 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f788ac98c604097b2f394732513ce15/118_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,640 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,640 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742353_1529, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/118_index.zip 2018-07-21T05:32:24,645 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6f788ac98c604097b2f394732513ce15/118_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:24,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,649 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742354_1530, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_118.json 2018-07-21T05:32:24,653 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_118.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,676 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,676 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742355_1531, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9fc1bbad85444d9a86beb7c1de945fd4/119_descriptor.json 2018-07-21T05:32:24,681 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9fc1bbad85444d9a86beb7c1de945fd4/119_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,683 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,683 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,684 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,684 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742356_1532, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/119_index.zip 2018-07-21T05:32:24,688 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9fc1bbad85444d9a86beb7c1de945fd4/119_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,693 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742357_1533, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_119.json 2018-07-21T05:32:24,698 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_119.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,716 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,716 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,716 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,716 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
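Each three-file cycle above (allocate plus completeFile, three times per segment number N = 113, 114, ...) follows one layout: an intermediate N_descriptor.json under a per-segment hash directory, an N_index.zip under the interval/version directory, and a flattened per-segment descriptor under segmentsDescriptorDir. A hypothetical path builder (class name, root path, and method of assembly invented for illustration) that reproduces the layout seen in the log:

import java.nio.file.Path;

public class DruidStagingLayout {
    public static void main(String[] args) {
        Path staging = Path.of("/tmp/druidStagingDir/.staging-example"); // placeholder root
        String dataSource = "default.druid_max_size_partition";
        String hashDir = "9d496b0c5267402cb427b813946902a5";             // per-segment working dir
        String intervalDir = "19691231T230000.000Z_19700101T000000.000Z";
        String versionDir = "2018-07-21T05_31_59.547-07_00";
        int n = 113;

        // The three staging files written for segment n, as in the records above.
        Path descriptor = staging.resolve("intermediateSegmentDir").resolve(dataSource)
                .resolve(hashDir).resolve(n + "_descriptor.json");
        Path indexZip = staging.resolve("intermediateSegmentDir").resolve(dataSource)
                .resolve(intervalDir).resolve(versionDir).resolve(n + "_index.zip");
        Path segmentsDescriptor = staging.resolve("segmentsDescriptorDir").resolve(
                dataSource + "_1969-12-31T230000.000Z_1970-01-01T000000.000Z"
                        + "_2018-07-21T053159.547-0700_" + n + ".json");

        System.out.println(descriptor);
        System.out.println(indexZip);
        System.out.println(segmentsDescriptor);
    }
}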
2018-07-21T05:32:24,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:24,717 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,717 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742358_1534, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7baed1a0b88e474ab2aad3744f7c945e/120_descriptor.json 2018-07-21T05:32:24,721 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7baed1a0b88e474ab2aad3744f7c945e/120_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:24,724 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,724 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742359_1535, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/120_index.zip 2018-07-21T05:32:24,729 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7baed1a0b88e474ab2aad3744f7c945e/120_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:24,733 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:24,733 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742360_1536, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_120.json 2018-07-21T05:32:24,737 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_120.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:24,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:24,755 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742361_1537, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33ff489c7373401c80b04357091e7440/121_descriptor.json 2018-07-21T05:32:24,760 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33ff489c7373401c80b04357091e7440/121_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:24,763 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,764 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742362_1538, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/121_index.zip 2018-07-21T05:32:24,768 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33ff489c7373401c80b04357091e7440/121_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:24,773 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,773 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742363_1539, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_121.json 2018-07-21T05:32:24,778 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_121.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:24,797 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,797 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742364_1540, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2798539d50d747cc9ce12587d25014be/122_descriptor.json 2018-07-21T05:32:24,802 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2798539d50d747cc9ce12587d25014be/122_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:24,804 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:24,805 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742365_1541, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/122_index.zip 2018-07-21T05:32:24,809 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2798539d50d747cc9ce12587d25014be/122_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:24,813 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:24,813 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742366_1542, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_122.json
2018-07-21T05:32:24,817 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_122.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:24,836 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:24,836 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742367_1543, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/21a941e9091d4327be2ac5b73a73d99c/123_descriptor.json
2018-07-21T05:32:24,841 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/21a941e9091d4327be2ac5b73a73d99c/123_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:24,844 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:24,844 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742368_1544, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/123_index.zip
2018-07-21T05:32:24,848 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/21a941e9091d4327be2ac5b73a73d99c/123_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:24,852 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,852 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742369_1545, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_123.json
2018-07-21T05:32:24,857 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_123.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:24,876 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:24,877 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742370_1546, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3b43ddbe652c4c81bf56a92068a591b7/124_descriptor.json
2018-07-21T05:32:24,881 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3b43ddbe652c4c81bf56a92068a591b7/124_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:24,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:24,884 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742371_1547, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/124_index.zip
2018-07-21T05:32:24,888 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3b43ddbe652c4c81bf56a92068a591b7/124_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:24,893 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:24,893 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742372_1548, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_124.json
2018-07-21T05:32:24,897 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_124.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:24,919 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:24,919 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742373_1549, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57a7bc10178f4ad4b233656a86bf56a0/125_descriptor.json
2018-07-21T05:32:24,924 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57a7bc10178f4ad4b233656a86bf56a0/125_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:24,926 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,927 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742374_1550, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/125_index.zip
2018-07-21T05:32:24,931 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57a7bc10178f4ad4b233656a86bf56a0/125_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,935 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:24,936 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,936 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742375_1551, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_125.json
2018-07-21T05:32:24,941 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_125.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:24,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,964 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742376_1552, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8133ce86051647118ac12a7be9e39a47/126_descriptor.json
2018-07-21T05:32:24,968 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8133ce86051647118ac12a7be9e39a47/126_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:24,971 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:24,971 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742377_1553, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/126_index.zip
2018-07-21T05:32:24,975 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8133ce86051647118ac12a7be9e39a47/126_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:24,979 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:24,979 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742378_1554, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_126.json
2018-07-21T05:32:24,984 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_126.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
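[Editor's note] Each "BLOCK* allocate" / "DIR* completeFile" pair above is the NameNode's view of one ordinary HDFS write from the reduce task's DFSClient: a block is allocated when the first data packet is flushed, and closing the stream completes the file. A minimal sketch of that client side using the standard Hadoop FileSystem API follows; the path and payload are made up for illustration and are not taken from this job.

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Client-side sketch of the write that produces one
    // "BLOCK* allocate" / "DIR* completeFile" pair on the NameNode.
    public class StagingWriteSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();   // picks up fs.defaultFS
            FileSystem fs = FileSystem.get(conf);
            Path descriptor = new Path("/tmp/druidStagingDir/example_descriptor.json");
            // create() opens a DFSOutputStream; the NameNode allocates a block
            // ("BLOCK* allocate blk_...") when the first packet is flushed.
            try (FSDataOutputStream out = fs.create(descriptor, true)) {
                out.write("{\"segment\":\"example\"}".getBytes(StandardCharsets.UTF_8));
            } // close() completes the file: "DIR* completeFile: ... is closed by DFSClient_..."
        }
    }

These descriptor and index files are tiny, so each one fits in a single block, which is why every file produces exactly one allocate line followed by one completeFile line.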
2018-07-21T05:32:25,004 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,004 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742379_1555, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c443dbd200f4993883a4ed670cd631c/127_descriptor.json
2018-07-21T05:32:25,009 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c443dbd200f4993883a4ed670cd631c/127_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:25,012 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,013 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742380_1556, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/127_index.zip
2018-07-21T05:32:25,018 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c443dbd200f4993883a4ed670cd631c/127_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,023 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,023 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742381_1557, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_127.json
2018-07-21T05:32:25,028 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_127.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,047 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742382_1558, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4ca9f0a5428456bb3a9ff2f488b91dc/128_descriptor.json
2018-07-21T05:32:25,052 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4ca9f0a5428456bb3a9ff2f488b91dc/128_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,055 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,055 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742383_1559, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/128_index.zip
2018-07-21T05:32:25,059 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4ca9f0a5428456bb3a9ff2f488b91dc/128_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,063 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,064 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742384_1560, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_128.json
2018-07-21T05:32:25,068 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_128.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,088 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,089 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742385_1561, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5e6e0e375286414cbdb73d7962c77259/129_descriptor.json
2018-07-21T05:32:25,093 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5e6e0e375286414cbdb73d7962c77259/129_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,096 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742386_1562, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/129_index.zip
2018-07-21T05:32:25,101 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5e6e0e375286414cbdb73d7962c77259/129_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,105 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,105 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742387_1563, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_129.json
2018-07-21T05:32:25,109 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_129.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
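[Editor's note] When auditing replica placement across a run like this one, it can help to pull the block id, replica list, and target path out of each allocate record. The following is a small, hypothetical helper (not part of Hive or Hadoop) using java.util.regex; the sample line is taken from this log with a shortened illustrative path.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Hypothetical helper for parsing the "BLOCK* allocate" lines above.
    public class AllocateLineParser {
        private static final Pattern ALLOCATE = Pattern.compile(
            "BLOCK\\* allocate (blk_\\d+_\\d+), replicas=([0-9.:, ]+) for (\\S+)");

        public static void main(String[] args) {
            String line = "2018-07-21T05:32:25,105 INFO [IPC Server handler 2 on 35925] "
                + "hdfs.StateChange: BLOCK* allocate blk_1073742387_1563, "
                + "replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /tmp/example.json";
            Matcher m = ALLOCATE.matcher(line);
            if (m.find()) {
                System.out.println("block    = " + m.group(1)); // blk_1073742387_1563
                System.out.println("replicas = " + m.group(2)); // the three datanodes
                System.out.println("path     = " + m.group(3)); // /tmp/example.json
            }
        }
    }

Counting group(2) values over the whole run would, for example, show whether the four datanodes receive replicas roughly uniformly, which is what the random placement above should produce.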
2018-07-21T05:32:25,128 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,128 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742388_1564, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ee0d80fff794b3997d80ef2485ca9c9/130_descriptor.json 2018-07-21T05:32:25,133 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ee0d80fff794b3997d80ef2485ca9c9/130_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:25,135 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,135 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742389_1565, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/130_index.zip 2018-07-21T05:32:25,143 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ee0d80fff794b3997d80ef2485ca9c9/130_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:25,147 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:25,147 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742390_1566, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_130.json 2018-07-21T05:32:25,151 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_130.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,172 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,172 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,172 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:25,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:25,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,173 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742391_1567, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b95d5adb7c964721bfdb3a748a431285/131_descriptor.json
2018-07-21T05:32:25,178 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b95d5adb7c964721bfdb3a748a431285/131_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,181 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742392_1568, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/131_index.zip
2018-07-21T05:32:25,186 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b95d5adb7c964721bfdb3a748a431285/131_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,190 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,190 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742393_1569, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_131.json
2018-07-21T05:32:25,195 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_131.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,214 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,214 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,215 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,215 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,215 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,215 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,215 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,215 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,215 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742394_1570, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/64ed92ad8a2349cd83b2a384b6b5b9d1/132_descriptor.json
2018-07-21T05:32:25,222 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/64ed92ad8a2349cd83b2a384b6b5b9d1/132_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,225 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,226 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742395_1571, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/132_index.zip
2018-07-21T05:32:25,231 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/64ed92ad8a2349cd83b2a384b6b5b9d1/132_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,235 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,235 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742396_1572, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_132.json
2018-07-21T05:32:25,239 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_132.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,259 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,259 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,259 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,259 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,260 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,260 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,260 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,260 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,260 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742397_1573, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09d1071077e742cb97a6e488ef8b0de2/133_descriptor.json
2018-07-21T05:32:25,265 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09d1071077e742cb97a6e488ef8b0de2/133_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,267 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,268 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742398_1574, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/133_index.zip
2018-07-21T05:32:25,273 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09d1071077e742cb97a6e488ef8b0de2/133_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,283 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,284 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742399_1575, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_133.json
2018-07-21T05:32:25,288 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_133.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,305 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,306 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742400_1576, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aab58c7309bf49ff9b1570917e88f2dc/134_descriptor.json
2018-07-21T05:32:25,311 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aab58c7309bf49ff9b1570917e88f2dc/134_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:25,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,314 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742401_1577, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/134_index.zip
2018-07-21T05:32:25,319 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aab58c7309bf49ff9b1570917e88f2dc/134_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,323 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,323 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742402_1578, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_134.json
2018-07-21T05:32:25,327 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_134.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,346 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,347 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742403_1579, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/166e67d22c454b0abe64452f8fa727a9/135_descriptor.json
2018-07-21T05:32:25,351 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/166e67d22c454b0abe64452f8fa727a9/135_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,353 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,353 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742404_1580, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/135_index.zip
2018-07-21T05:32:25,358 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/166e67d22c454b0abe64452f8fa727a9/135_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:25,361 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,362 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742405_1581, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_135.json
2018-07-21T05:32:25,366 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_135.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:25,387 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,388 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742406_1582, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/06940a25e8404477b3f5461617ad6b9b/136_descriptor.json
2018-07-21T05:32:25,392 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/06940a25e8404477b3f5461617ad6b9b/136_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,395 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742407_1583, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/136_index.zip
2018-07-21T05:32:25,400 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/06940a25e8404477b3f5461617ad6b9b/136_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,404 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,404 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742408_1584, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_136.json
2018-07-21T05:32:25,409 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_136.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,428 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,428 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742409_1585, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b435f1549634989ad42312d2c3369fe/137_descriptor.json
2018-07-21T05:32:25,433 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b435f1549634989ad42312d2c3369fe/137_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,435 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,436 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742410_1586, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/137_index.zip
2018-07-21T05:32:25,440 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b435f1549634989ad42312d2c3369fe/137_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:25,444 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,445 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742411_1587, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_137.json
2018-07-21T05:32:25,449 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_137.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,467 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,467 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742412_1588, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6e7cde0f996c419992c36e4d4873c7bc/138_descriptor.json
2018-07-21T05:32:25,471 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6e7cde0f996c419992c36e4d4873c7bc/138_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,474 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742413_1589, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/138_index.zip
2018-07-21T05:32:25,478 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6e7cde0f996c419992c36e4d4873c7bc/138_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,482 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,483 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742414_1590, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_138.json
2018-07-21T05:32:25,487 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_138.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,505 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,505 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742415_1591, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50c64f39f7144d1783aac5b27aa8d5f2/139_descriptor.json
2018-07-21T05:32:25,509 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50c64f39f7144d1783aac5b27aa8d5f2/139_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:25,512 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,512 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742416_1592, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/139_index.zip 2018-07-21T05:32:25,516 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50c64f39f7144d1783aac5b27aa8d5f2/139_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
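The repeating DEBUG pattern above is the namenode's replica placement at work on a single-rack miniDFS cluster with four datanodes (127.0.0.1:33099, :40780, :45625 and :52570). The "Failed to find datanode (scope=\"\" excludedScope=\"/default-rack\")" / "No node to choose." pair appears each time the policy first looks for a node outside the only rack and finds none; it then falls back to drawing a random node inside /default-rack, skipping any node already holding a replica of the block ("Node X is excluded, continuing."). A minimal Java sketch of that draw-and-skip loop follows; the class and method bodies are mine for illustration, not Hadoop's actual NetworkTopology internals:

```java
import java.util.List;
import java.util.Random;
import java.util.Set;

// Illustrative sketch of the draw-and-skip selection traced by the DEBUG
// lines above; this is NOT Hadoop's actual NetworkTopology implementation.
final class ChooseRandomSketch {
    private static final Random RANDOM = new Random();

    // Picks a random node from the rack that is not in excludeNodes.
    // Returns null when every node is excluded ("No node to choose.").
    static String chooseRandom(List<String> rackNodes, Set<String> excludeNodes) {
        long available = rackNodes.stream()
                .filter(n -> !excludeNodes.contains(n)).count();
        if (available == 0) {
            return null;                      // "No node to choose."
        }
        while (true) {
            String candidate = rackNodes.get(RANDOM.nextInt(rackNodes.size()));
            if (excludeNodes.contains(candidate)) {
                continue;                     // "Node <X> is excluded, continuing."
            }
            return candidate;                 // "chooseRandom returning <X>"
        }
    }
}
```

Since every file here is written with three replicas, each block apparently triggers this selection twice after the first target is fixed, with the already-chosen targets added to excludeNodes; that is why the log alternates between "Choosing random from 3 available nodes" with one excluded node and "Choosing random from 2 available nodes" with two.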
2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,520 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,521 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742417_1593, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_139.json
2018-07-21T05:32:25,525 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_139.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,544 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,544 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742418_1594, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f768338b891f40c9bd427cd2d1baa7ab/140_descriptor.json
2018-07-21T05:32:25,549 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f768338b891f40c9bd427cd2d1baa7ab/140_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,554 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,554 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742419_1595, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/140_index.zip
2018-07-21T05:32:25,559 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f768338b891f40c9bd427cd2d1baa7ab/140_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,563 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742420_1596, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_140.json
2018-07-21T05:32:25,568 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_140.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,590 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,590 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742421_1597, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/466d9f2b17d94de1af53e082ca4b4f0f/141_descriptor.json
2018-07-21T05:32:25,599 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/466d9f2b17d94de1af53e082ca4b4f0f/141_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,603 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,603 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742422_1598, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/141_index.zip
2018-07-21T05:32:25,609 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/466d9f2b17d94de1af53e082ca4b4f0f/141_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,615 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742423_1599, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_141.json
2018-07-21T05:32:25,624 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_141.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,644 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,645 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,645 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742424_1600, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f83eaf4fa2cd4584b8bf5e867cbdcfd8/142_descriptor.json
2018-07-21T05:32:25,650 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f83eaf4fa2cd4584b8bf5e867cbdcfd8/142_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,652 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,652 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,652 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,652 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,653 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,653 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,653 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,653 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,653 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742425_1601, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/142_index.zip
2018-07-21T05:32:25,664 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f83eaf4fa2cd4584b8bf5e867cbdcfd8/142_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,670 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742426_1602, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_142.json
2018-07-21T05:32:25,675 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053159.547-0700_142.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,695 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742427_1603, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1e3cafc9e9a14a42bf465c7d958f019a/0_descriptor.json
2018-07-21T05:32:25,700 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1e3cafc9e9a14a42bf465c7d958f019a/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,707 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,707 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742428_1604, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/0_index.zip
2018-07-21T05:32:25,714 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1e3cafc9e9a14a42bf465c7d958f019a/0_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
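Each descriptor/index file in this stretch is an ordinary HDFS client write issued by the reducer (DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30): create, write, close. On the namenode that surfaces as one "BLOCK* allocate" per block when data first arrives and one "DIR* completeFile" when the stream is closed. A minimal sketch against the standard org.apache.hadoop.fs API follows; the path and payload are placeholders I chose, not taken from the test:

```java
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch of the client-side cycle behind each allocate/completeFile pair
// above. Writing the first byte makes the client ask the namenode for a
// block ("BLOCK* allocate ..."); closing the stream completes the file
// ("DIR* completeFile: ... is closed by DFSClient_...").
public final class StagingWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path descriptor = new Path("/tmp/druidStagingDir/0_descriptor.json"); // placeholder path
        try (FSDataOutputStream out = fs.create(descriptor)) {
            out.write("{}".getBytes(StandardCharsets.UTF_8)); // placeholder payload
        }
    }
}
```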
2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:25,718 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,718 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742429_1605, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700.json 2018-07-21T05:32:25,723 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:25,746 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:25,746 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742430_1606, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/84fd3004c26a4ae0b944ae7cb1e4bd2b/1_descriptor.json 2018-07-21T05:32:25,751 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/84fd3004c26a4ae0b944ae7cb1e4bd2b/1_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:25,754 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,754 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742431_1607, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/1_index.zip 2018-07-21T05:32:25,759 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/84fd3004c26a4ae0b944ae7cb1e4bd2b/1_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,763 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,763 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:25,764 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,764 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742432_1608, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_1.json 2018-07-21T05:32:25,768 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_1.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,792 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:25,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,793 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742433_1609, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/409ed45a5b554e46bf0955de7bc3dfb9/2_descriptor.json 2018-07-21T05:32:25,800 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/409ed45a5b554e46bf0955de7bc3dfb9/2_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:25,802 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:25,802 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742434_1610, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/2_index.zip 2018-07-21T05:32:25,810 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/409ed45a5b554e46bf0955de7bc3dfb9/2_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:25,823 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,823 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742435_1611, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_2.json 2018-07-21T05:32:25,829 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_2.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:25,863 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,863 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742436_1612, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a996ef54a3cb4f5eb25f1b1469ba6469/3_descriptor.json 2018-07-21T05:32:25,868 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a996ef54a3cb4f5eb25f1b1469ba6469/3_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:25,872 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:25,872 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742437_1613, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/3_index.zip 2018-07-21T05:32:25,876 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a996ef54a3cb4f5eb25f1b1469ba6469/3_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:25,880 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,881 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742438_1614, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_3.json 2018-07-21T05:32:25,885 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_3.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,913 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,913 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,914 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:25,914 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:25,914 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,914 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:25,914 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:25,914 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:25,914 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742439_1615, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7c152d92046144c58483f379149b6b19/4_descriptor.json 2018-07-21T05:32:25,920 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7c152d92046144c58483f379149b6b19/4_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:25,923 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,923 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742440_1616, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/4_index.zip
2018-07-21T05:32:25,931 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7c152d92046144c58483f379149b6b19/4_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:25,941 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:25,941 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742441_1617, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_4.json
2018-07-21T05:32:25,948 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_4.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:25,986 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:25,986 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742442_1618, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12ef86de203343d2a511bfe754b798ab/5_descriptor.json
2018-07-21T05:32:25,995 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12ef86de203343d2a511bfe754b798ab/5_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:25,998 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:25,998 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742443_1619, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/5_index.zip
2018-07-21T05:32:26,004 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12ef86de203343d2a511bfe754b798ab/5_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,008 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,009 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,009 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,009 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742444_1620, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_5.json
2018-07-21T05:32:26,026 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_5.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,066 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,066 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742445_1621, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7961f3acc71f4662bb5e16d9fc9ebc16/6_descriptor.json
2018-07-21T05:32:26,072 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7961f3acc71f4662bb5e16d9fc9ebc16/6_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,075 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,075 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742446_1622, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/6_index.zip
2018-07-21T05:32:26,082 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7961f3acc71f4662bb5e16d9fc9ebc16/6_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,088 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,088 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,089 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,089 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,089 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,089 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,089 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,089 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,089 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742447_1623, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_6.json
2018-07-21T05:32:26,094 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_6.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
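Each allocate/completeFile pair in this stretch corresponds to the reducer writing one small staging file: the per-partition N_descriptor.json and N_index.zip under intermediateSegmentDir, then the segment descriptor under segmentsDescriptorDir. Every file fits in a single block, so the NameNode logs one "BLOCK* allocate" when the client first flushes data and one "DIR* completeFile" when the output stream is closed. A hedged client-side sketch of that lifecycle using the standard Hadoop FileSystem API (the path and payload are illustrative, not the test's real staging contents):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch of the writes that produce the "BLOCK* allocate" / "DIR* completeFile"
// pairs above: a small file needs exactly one block allocation, and close()
// completes the file on the NameNode.
public class StagingWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // picks up fs.defaultFS (HDFS in this test)
        FileSystem fs = FileSystem.get(conf);
        Path descriptor = new Path("/tmp/druidStagingDir/segmentsDescriptorDir/example_descriptor.json");
        try (FSDataOutputStream out = fs.create(descriptor, true /* overwrite */)) {
            // Flushing the first packet makes the DFSClient ask the NameNode
            // for a block, which logs "BLOCK* allocate blk_..." together with
            // the three-node replica pipeline chosen by chooseRandom.
            out.writeBytes("{\"dataSource\": \"default.druid_max_size_partition\"}");
        } // close() -> "DIR* completeFile: ... is closed by DFSClient_..."
    }
}
```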
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,118 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,118 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742448_1624, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/816529c11dbe4535ae0d2d4cd62650e4/7_descriptor.json
2018-07-21T05:32:26,123 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/816529c11dbe4535ae0d2d4cd62650e4/7_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,125 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,125 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,125 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,125 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:26,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,126 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742449_1625, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/7_index.zip
2018-07-21T05:32:26,135 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/816529c11dbe4535ae0d2d4cd62650e4/7_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,140 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,140 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742450_1626, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_7.json
2018-07-21T05:32:26,146 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_7.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,166 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,166 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742451_1627, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/68ed6c35360245018186dd7e24b97361/8_descriptor.json
2018-07-21T05:32:26,177 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/68ed6c35360245018186dd7e24b97361/8_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,180 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,180 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742452_1628, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/8_index.zip
2018-07-21T05:32:26,188 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/68ed6c35360245018186dd7e24b97361/8_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,193 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742453_1629, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_8.json
2018-07-21T05:32:26,200 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_8.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,242 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742454_1630, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e1fa50820604e1aace10298a47d5fa2/9_descriptor.json
2018-07-21T05:32:26,247 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e1fa50820604e1aace10298a47d5fa2/9_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,250 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742455_1631, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/9_index.zip
2018-07-21T05:32:26,255 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e1fa50820604e1aace10298a47d5fa2/9_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,262 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742456_1632, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_9.json
2018-07-21T05:32:26,267 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_9.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,290 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,290 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742457_1633, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/79abac3047144f5e98a378a9e8aa2806/10_descriptor.json
2018-07-21T05:32:26,294 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/79abac3047144f5e98a378a9e8aa2806/10_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,297 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,297 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742458_1634, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/10_index.zip
2018-07-21T05:32:26,302 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/79abac3047144f5e98a378a9e8aa2806/10_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
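The file names repeating through this run make the staging layout visible: for reducer partition N, the job writes intermediateSegmentDir/<dataSource>/<taskHash>/N_descriptor.json and a matching N_index.zip, plus a per-segment JSON under segmentsDescriptorDir named <dataSource>_<intervalStart>_<intervalEnd>_<version>_N.json, where the interval is the hour bucket 1970-01-01T00:00:00Z to 01:00:00Z and the version is the job start time 2018-07-21T05:31:59.547-0700. A small reconstruction of those names from the observed paths (the helpers are hypothetical, not Druid's actual naming code):

```java
// Hypothetical reconstruction of the staging-file naming pattern seen in the
// log above; it only mirrors the observed paths, it is not Druid's code.
public class StagingLayoutSketch {
    static String segmentDescriptor(String dataSource, String start, String end,
                                    String version, int partition) {
        return String.format("segmentsDescriptorDir/%s_%s_%s_%s_%d.json",
                dataSource, start, end, version, partition);
    }

    static String intermediateDescriptor(String dataSource, String taskHash, int partition) {
        return String.format("intermediateSegmentDir/%s/%s/%d_descriptor.json",
                dataSource, taskHash, partition);
    }

    public static void main(String[] args) {
        // Reproduces the partition-10 names from the surrounding log entries.
        System.out.println(segmentDescriptor("default.druid_max_size_partition",
                "1970-01-01T000000.000Z", "1970-01-01T010000.000Z",
                "2018-07-21T053159.547-0700", 10));
        System.out.println(intermediateDescriptor("default.druid_max_size_partition",
                "79abac3047144f5e98a378a9e8aa2806", 10));
    }
}
```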
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,306 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,306 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742459_1635, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_10.json
2018-07-21T05:32:26,310 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_10.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,337 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,337 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742460_1636, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/45d02c774d834c669007a998af6bdd2b/11_descriptor.json
2018-07-21T05:32:26,342 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/45d02c774d834c669007a998af6bdd2b/11_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,347 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,347 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742461_1637, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/11_index.zip
2018-07-21T05:32:26,357 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/45d02c774d834c669007a998af6bdd2b/11_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,366 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742462_1638, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_11.json
2018-07-21T05:32:26,379 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_11.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,409 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,409 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742463_1639, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a2c18f77d29425d93d5ff86623dd032/12_descriptor.json
2018-07-21T05:32:26,415 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a2c18f77d29425d93d5ff86623dd032/12_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,418 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,418 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742464_1640, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/12_index.zip
2018-07-21T05:32:26,423 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a2c18f77d29425d93d5ff86623dd032/12_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,427 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,427 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742465_1641, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_12.json
2018-07-21T05:32:26,432 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_12.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,452 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,453 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742466_1642, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/28c4004318b84897b832ae8c255fea50/13_descriptor.json
2018-07-21T05:32:26,457 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/28c4004318b84897b832ae8c255fea50/13_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,459 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,460 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742467_1643, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/13_index.zip
2018-07-21T05:32:26,464 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/28c4004318b84897b832ae8c255fea50/13_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,468 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,468 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742468_1644, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_13.json
2018-07-21T05:32:26,472 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_13.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,494 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742469_1645, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1efbd5a1904d4a9283ca5c8077c4a5ae/14_descriptor.json
2018-07-21T05:32:26,499 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1efbd5a1904d4a9283ca5c8077c4a5ae/14_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,501 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,501 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742470_1646, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/14_index.zip
2018-07-21T05:32:26,505 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1efbd5a1904d4a9283ca5c8077c4a5ae/14_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,509 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,510 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742471_1647, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_14.json
2018-07-21T05:32:26,514 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_14.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,535 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742472_1648, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6492316a75ec4df6800ef23b3e495007/15_descriptor.json
2018-07-21T05:32:26,539 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6492316a75ec4df6800ef23b3e495007/15_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,542 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,542 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742473_1649, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/15_index.zip
2018-07-21T05:32:26,550 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6492316a75ec4df6800ef23b3e495007/15_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,554 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,554 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742474_1650, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_15.json
2018-07-21T05:32:26,558 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_15.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,580 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742475_1651, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f3483bce65e242b8b22055cf28bf0561/16_descriptor.json
2018-07-21T05:32:26,584 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f3483bce65e242b8b22055cf28bf0561/16_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,587 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,587 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742476_1652, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/16_index.zip
2018-07-21T05:32:26,592 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f3483bce65e242b8b22055cf28bf0561/16_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,598 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,598 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,599 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,599 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742477_1653, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_16.json
2018-07-21T05:32:26,611 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_16.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,634 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,635 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742478_1654, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b1c1189ed1d842569b5bf38761563ab7/17_descriptor.json
2018-07-21T05:32:26,639 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b1c1189ed1d842569b5bf38761563ab7/17_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,641 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,642 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742479_1655, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/17_index.zip
2018-07-21T05:32:26,646 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b1c1189ed1d842569b5bf38761563ab7/17_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,650 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,650 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742480_1656, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_17.json
2018-07-21T05:32:26,655 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_17.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,682 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,683 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742481_1657, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5af19ad82b4b41ee938f9e3421956601/18_descriptor.json
2018-07-21T05:32:26,688 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5af19ad82b4b41ee938f9e3421956601/18_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,690 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,690 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742482_1658, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/18_index.zip
2018-07-21T05:32:26,695 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5af19ad82b4b41ee938f9e3421956601/18_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,699 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742483_1659, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_18.json
2018-07-21T05:32:26,703 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_18.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,725 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,725 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742484_1660, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5189605f98c47858369cfb1d18172cc/19_descriptor.json
2018-07-21T05:32:26,734 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5189605f98c47858369cfb1d18172cc/19_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:26,737 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,737 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742485_1661, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/19_index.zip
2018-07-21T05:32:26,742 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5189605f98c47858369cfb1d18172cc/19_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,747 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742486_1662, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_19.json
2018-07-21T05:32:26,751 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_19.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:26,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,775 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742487_1663, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6eacc6fb3c14a6799075b5bb5d35255/20_descriptor.json
2018-07-21T05:32:26,786 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6eacc6fb3c14a6799075b5bb5d35255/20_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:26,789 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:26,789 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742488_1664, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/20_index.zip 2018-07-21T05:32:26,798 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6eacc6fb3c14a6799075b5bb5d35255/20_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:26,802 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:26,802 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:26,802 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:26,802 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:26,802 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:26,802 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:26,803 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:26,803 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:26,803 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:26,803 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742489_1665, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_20.json 2018-07-21T05:32:26,808 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_20.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:26,835 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:26,836 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742490_1666, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aeced2d45ba94416ae0ab5c4f9cc5bcc/21_descriptor.json 2018-07-21T05:32:26,854 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aeced2d45ba94416ae0ab5c4f9cc5bcc/21_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:26,859 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:26,859 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,860 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742491_1667, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/21_index.zip
2018-07-21T05:32:26,866 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aeced2d45ba94416ae0ab5c4f9cc5bcc/21_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:26,873 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,873 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742492_1668, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_21.json
2018-07-21T05:32:26,878 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_21.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,901 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,901 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742493_1669, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91ed2df5bfbd40129f97da82d14c78b1/22_descriptor.json
2018-07-21T05:32:26,906 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91ed2df5bfbd40129f97da82d14c78b1/22_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,908 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,909 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742494_1670, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/22_index.zip
2018-07-21T05:32:26,917 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91ed2df5bfbd40129f97da82d14c78b1/22_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,925 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,925 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742495_1671, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_22.json
2018-07-21T05:32:26,930 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_22.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
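[editor's note] Each BLOCK* allocate / DIR* completeFile pair in this stream is the NameNode's view of one reducer writing a single small staging file: the DFSClient asks for a block, the NameNode picks the three-node replica pipeline as traced above, and closing the output stream completes the file. A minimal client-side sketch of one such write using the standard org.apache.hadoop.fs API; the path and payload here are placeholders, not values from this run:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Writes a single small file to HDFS. On the NameNode this appears as one
// "BLOCK* allocate blk_..." entry (block and replica pipeline chosen) followed
// by one "DIR* completeFile: ... is closed by DFSClient_..." entry.
public class WriteDescriptor {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // picks up fs.defaultFS
        try (FileSystem fs = FileSystem.get(conf);
             FSDataOutputStream out = fs.create(new Path("/tmp/example_descriptor.json"))) {
            out.writeBytes("{}"); // one block, well under dfs.blocksize
        } // close() triggers completeFile on the NameNode
    }
}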
2018-07-21T05:32:26,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,953 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742496_1672, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9066d2497d6645ca81877906cde56e9d/23_descriptor.json
2018-07-21T05:32:26,958 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9066d2497d6645ca81877906cde56e9d/23_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:26,960 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,960 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742497_1673, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/23_index.zip
2018-07-21T05:32:26,965 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9066d2497d6645ca81877906cde56e9d/23_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:26,969 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:26,970 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742498_1674, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_23.json
2018-07-21T05:32:27,377 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_23.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,404 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,404 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,404 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,405 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,405 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742499_1675, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c97217239114980853e40a4282b5e64/24_descriptor.json
2018-07-21T05:32:27,410 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c97217239114980853e40a4282b5e64/24_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,414 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742500_1676, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/24_index.zip
2018-07-21T05:32:27,419 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c97217239114980853e40a4282b5e64/24_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,423 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742501_1677, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_24.json
2018-07-21T05:32:27,424 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:27,428 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_24.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,449 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,449 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,449 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:27,450 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,450 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,450 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,450 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:27,450 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,450 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,450 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742502_1678, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19ade41e15d74033a20504f002a05e0e/25_descriptor.json
2018-07-21T05:32:27,467 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19ade41e15d74033a20504f002a05e0e/25_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,470 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,470 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742503_1679, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/25_index.zip
2018-07-21T05:32:27,495 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19ade41e15d74033a20504f002a05e0e/25_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,506 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,506 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742504_1680, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_25.json
2018-07-21T05:32:27,516 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_25.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,542 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,542 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742505_1681, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1c8e925d91db43fb94c19cc70afca23a/26_descriptor.json
2018-07-21T05:32:27,551 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1c8e925d91db43fb94c19cc70afca23a/26_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,559 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,559 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742506_1682, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/26_index.zip
2018-07-21T05:32:27,569 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1c8e925d91db43fb94c19cc70afca23a/26_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,574 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,574 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742507_1683, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_26.json
2018-07-21T05:32:27,579 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_26.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,619 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,619 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742508_1684, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/96678dd277c64d3287336fe792149a3b/27_descriptor.json
2018-07-21T05:32:27,625 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/96678dd277c64d3287336fe792149a3b/27_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,633 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,633 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742509_1685, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/27_index.zip
2018-07-21T05:32:27,640 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/96678dd277c64d3287336fe792149a3b/27_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,646 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,646 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742510_1686, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_27.json
2018-07-21T05:32:27,653 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_27.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,675 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,675 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,675 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,675 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,676 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742511_1687, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6b23708462274c9da4febae576d00d04/28_descriptor.json
2018-07-21T05:32:27,686 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6b23708462274c9da4febae576d00d04/28_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:27,689 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,690 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742512_1688, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/28_index.zip
2018-07-21T05:32:27,701 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6b23708462274c9da4febae576d00d04/28_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:27,706 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,706 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742513_1689, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_28.json
2018-07-21T05:32:27,712 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_28.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:27,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,740 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742514_1690, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12c8c50894674a5f9dabca64dd21c184/29_descriptor.json
2018-07-21T05:32:27,746 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12c8c50894674a5f9dabca64dd21c184/29_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,749 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,749 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742515_1691, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/29_index.zip
2018-07-21T05:32:27,754 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12c8c50894674a5f9dabca64dd21c184/29_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:27,759 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,759 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742516_1692, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_29.json
2018-07-21T05:32:27,764 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_29.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,785 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:27,786 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,786 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742517_1693, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aed019fa5c14438f9cd0344cfa31a1eb/30_descriptor.json
2018-07-21T05:32:27,790 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aed019fa5c14438f9cd0344cfa31a1eb/30_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,793 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:27,793 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742518_1694, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/30_index.zip
2018-07-21T05:32:27,798 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aed019fa5c14438f9cd0344cfa31a1eb/30_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,802 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,802 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742519_1695, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_30.json
2018-07-21T05:32:27,808 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_30.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:27,836 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,836 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742520_1696, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c1d3f2d84fd436f8b266e93f06063ba/31_descriptor.json
2018-07-21T05:32:27,840 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c1d3f2d84fd436f8b266e93f06063ba/31_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:27,843 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:27,843 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742521_1697, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/31_index.zip
2018-07-21T05:32:28,251 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c1d3f2d84fd436f8b266e93f06063ba/31_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,256 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,256 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742522_1698, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_31.json
2018-07-21T05:32:28,261 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_31.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:28,285 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,285 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742523_1699, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2f1c85bc0a948b0b374cf02017ab6df/32_descriptor.json
2018-07-21T05:32:28,292 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2f1c85bc0a948b0b374cf02017ab6df/32_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,294 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,295 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742524_1700, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/32_index.zip
2018-07-21T05:32:28,300 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2f1c85bc0a948b0b374cf02017ab6df/32_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:28,304 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,304 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742525_1701, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_32.json
2018-07-21T05:32:28,309 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_32.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:28,330 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,330 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742526_1702, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41042f91cff34da2acd501dc6280232f/33_descriptor.json
2018-07-21T05:32:28,336 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41042f91cff34da2acd501dc6280232f/33_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,343 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,343 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742527_1703, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/33_index.zip
2018-07-21T05:32:28,353 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41042f91cff34da2acd501dc6280232f/33_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,357 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,358 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742528_1704, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_33.json
2018-07-21T05:32:28,364 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_33.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:28,388 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,388 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742529_1705, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e408cf50cd9541698b84f9a66b0cc867/34_descriptor.json
2018-07-21T05:32:28,394 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e408cf50cd9541698b84f9a66b0cc867/34_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,396 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,396 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742530_1706, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/34_index.zip
2018-07-21T05:32:28,401 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e408cf50cd9541698b84f9a66b0cc867/34_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,406 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,406 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742531_1707, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_34.json
2018-07-21T05:32:28,411 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_34.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:28,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,433 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742532_1708, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90ccf3a145e74353afc412b3e9407971/35_descriptor.json
2018-07-21T05:32:28,438 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90ccf3a145e74353afc412b3e9407971/35_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:28,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,441 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742533_1709, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/35_index.zip
2018-07-21T05:32:28,445 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90ccf3a145e74353afc412b3e9407971/35_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:28,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:28,450 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:28,450 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,450 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742534_1710, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_35.json
2018-07-21T05:32:28,454 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_35.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:28,475 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,476 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742535_1711, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f1d9c20fd64d4c0298581956b8fc4606/36_descriptor.json
2018-07-21T05:32:28,480 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f1d9c20fd64d4c0298581956b8fc4606/36_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,483 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,483 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742536_1712, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/36_index.zip
2018-07-21T05:32:28,487 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f1d9c20fd64d4c0298581956b8fc4606/36_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:28,491 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,491 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742537_1713, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_36.json
2018-07-21T05:32:28,496 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_36.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,515 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,515 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,515 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,515 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,516 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,516 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,516 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:28,516 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,516 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742538_1714, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/98ed9c4bf0d04b89aba0d1700862ddcd/37_descriptor.json
2018-07-21T05:32:28,524 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/98ed9c4bf0d04b89aba0d1700862ddcd/37_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:28,527 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,527 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742539_1715, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/37_index.zip 2018-07-21T05:32:28,532 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/98ed9c4bf0d04b89aba0d1700862ddcd/37_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:28,536 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,536 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742540_1716, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_37.json 2018-07-21T05:32:28,541 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_37.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:28,567 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,567 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742541_1717, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1f38e49a9af0495382205777a0465e61/38_descriptor.json 2018-07-21T05:32:28,572 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1f38e49a9af0495382205777a0465e61/38_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,577 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,577 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742542_1718, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/38_index.zip 2018-07-21T05:32:28,582 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1f38e49a9af0495382205777a0465e61/38_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:28,586 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,586 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742543_1719, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_38.json 2018-07-21T05:32:28,591 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_38.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,611 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742544_1720, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ea348a79dac14e04a61f70a92fd23c04/39_descriptor.json 2018-07-21T05:32:28,616 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ea348a79dac14e04a61f70a92fd23c04/39_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,618 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,618 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742545_1721, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/39_index.zip 2018-07-21T05:32:28,623 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ea348a79dac14e04a61f70a92fd23c04/39_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:28,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,627 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742546_1722, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_39.json 2018-07-21T05:32:28,631 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_39.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:28,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,650 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742547_1723, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c023ae9192440158fe0b0bffd1bf868/40_descriptor.json 2018-07-21T05:32:28,655 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c023ae9192440158fe0b0bffd1bf868/40_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:28,657 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,657 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742548_1724, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/40_index.zip 2018-07-21T05:32:28,662 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c023ae9192440158fe0b0bffd1bf868/40_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
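Each BLOCK* allocate / DIR* completeFile pair above is the NameNode-side record of one small file being created with three replicas and closed by the reducer's DFSClient. A sketch of the client calls that produce such a pair, using the stock Hadoop FileSystem API; the path and payload here are placeholders, not values from this run:

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsWriteSketch {
    public static void main(String[] args) throws Exception {
        // Reads fs.defaultFS and friends from core-site.xml/hdfs-site.xml on the classpath.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path descriptor = new Path("/tmp/druidStagingDir/example_descriptor.json"); // placeholder path
        // Writing data prompts the NameNode to log "BLOCK* allocate blk_..., replicas=a, b, c".
        try (FSDataOutputStream out = fs.create(descriptor, (short) 3)) {
            out.write("{}".getBytes(StandardCharsets.UTF_8));
        }
        // close() completes the block; the NameNode logs "DIR* completeFile: ... is closed by DFSClient_...".
    }
}

Because every descriptor and index file in this stretch fits in a single HDFS block, each file shows up as exactly one allocate followed by one completeFile.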
2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:28,666 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,666 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742549_1725, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_40.json 2018-07-21T05:32:28,671 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_40.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:28,692 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,692 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742550_1726, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/60bd8ebcc4dc45ee8f05dc065773a674/41_descriptor.json 2018-07-21T05:32:28,697 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/60bd8ebcc4dc45ee8f05dc065773a674/41_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,700 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:28,700 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,700 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,700 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,700 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:28,700 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,700 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742551_1727, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/41_index.zip 2018-07-21T05:32:28,704 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/60bd8ebcc4dc45ee8f05dc065773a674/41_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,708 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,708 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742552_1728, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_41.json 2018-07-21T05:32:28,713 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_41.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,733 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,733 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742553_1729, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/20583983d90248fa844a8c286562df3f/42_descriptor.json 2018-07-21T05:32:28,738 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/20583983d90248fa844a8c286562df3f/42_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:28,740 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,741 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742554_1730, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/42_index.zip 2018-07-21T05:32:28,745 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/20583983d90248fa844a8c286562df3f/42_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:28,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,749 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742555_1731, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_42.json 2018-07-21T05:32:28,754 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_42.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,774 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,775 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742556_1732, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/96198329d3ee40e4a687845aa8a5cd9e/43_descriptor.json 2018-07-21T05:32:28,780 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/96198329d3ee40e4a687845aa8a5cd9e/43_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,782 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,782 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742557_1733, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/43_index.zip 2018-07-21T05:32:28,787 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/96198329d3ee40e4a687845aa8a5cd9e/43_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:28,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,791 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742558_1734, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_43.json 2018-07-21T05:32:28,800 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_43.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:28,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,828 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742559_1735, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e52fb49d3bcd4976b651372007d2458e/44_descriptor.json 2018-07-21T05:32:28,833 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e52fb49d3bcd4976b651372007d2458e/44_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:28,835 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742560_1736, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/44_index.zip 2018-07-21T05:32:28,840 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e52fb49d3bcd4976b651372007d2458e/44_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:28,844 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,844 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742561_1737, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_44.json 2018-07-21T05:32:28,849 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_44.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:28,869 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,870 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742562_1738, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/65d01c2552a84658868f9dd8f12cc401/45_descriptor.json 2018-07-21T05:32:28,874 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/65d01c2552a84658868f9dd8f12cc401/45_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:28,877 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,877 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742563_1739, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/45_index.zip 2018-07-21T05:32:28,882 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/65d01c2552a84658868f9dd8f12cc401/45_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:28,886 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,886 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742564_1740, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_45.json
2018-07-21T05:32:28,891 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_45.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,911 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,912 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,912 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,912 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,912 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,912 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:28,912 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,912 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742565_1741, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1a57cf00b8b043038472afdf2f5def7a/46_descriptor.json
2018-07-21T05:32:28,917 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1a57cf00b8b043038472afdf2f5def7a/46_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:28,919 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,919 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742566_1742, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/46_index.zip
2018-07-21T05:32:28,924 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1a57cf00b8b043038472afdf2f5def7a/46_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:28,928 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,928 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742567_1743, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_46.json
2018-07-21T05:32:28,933 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_46.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:28,956 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,956 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742568_1744, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f6d59fd4345b44d89315141c67d4de3c/47_descriptor.json
2018-07-21T05:32:28,963 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f6d59fd4345b44d89315141c67d4de3c/47_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:28,970 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:28,970 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742569_1745, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/47_index.zip
2018-07-21T05:32:28,976 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f6d59fd4345b44d89315141c67d4de3c/47_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:28,992 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,992 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,993 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:28,993 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:28,993 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:28,993 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:28,993 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:28,993 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:28,993 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742570_1746, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_47.json
2018-07-21T05:32:29,000 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_47.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,022 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,023 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742571_1747, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/81495476327d4d4d9a06a010f1a2e450/48_descriptor.json
2018-07-21T05:32:29,028 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/81495476327d4d4d9a06a010f1a2e450/48_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:29,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,031 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742572_1748, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/48_index.zip
2018-07-21T05:32:29,037 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/81495476327d4d4d9a06a010f1a2e450/48_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,041 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,041 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742573_1749, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_48.json
2018-07-21T05:32:29,052 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_48.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,080 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,081 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742574_1750, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b6f455d779c447cbd881dbe6ef29234/49_descriptor.json
2018-07-21T05:32:29,088 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b6f455d779c447cbd881dbe6ef29234/49_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:29,091 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,091 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742575_1751, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/49_index.zip
2018-07-21T05:32:29,496 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b6f455d779c447cbd881dbe6ef29234/49_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,501 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,501 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742576_1752, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_49.json
2018-07-21T05:32:29,506 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_49.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,528 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,528 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742577_1753, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0fff4d003f4b44d69c631477e721c138/50_descriptor.json
2018-07-21T05:32:29,533 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0fff4d003f4b44d69c631477e721c138/50_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,537 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,537 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742578_1754, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/50_index.zip
2018-07-21T05:32:29,543 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0fff4d003f4b44d69c631477e721c138/50_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,548 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,548 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742579_1755, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_50.json
2018-07-21T05:32:29,556 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_50.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,598 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,599 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,599 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742580_1756, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2696ff420cdc4f00a3ec756c23282e6e/51_descriptor.json
2018-07-21T05:32:29,604 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2696ff420cdc4f00a3ec756c23282e6e/51_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,607 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742581_1757, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/51_index.zip
2018-07-21T05:32:29,612 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2696ff420cdc4f00a3ec756c23282e6e/51_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,616 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,616 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742582_1758, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_51.json
2018-07-21T05:32:29,621 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_51.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,646 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,647 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742583_1759, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3ffaa04c8a6b4019bb5e757054f20ef5/52_descriptor.json
2018-07-21T05:32:29,657 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3ffaa04c8a6b4019bb5e757054f20ef5/52_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,660 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,660 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742584_1760, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/52_index.zip
2018-07-21T05:32:29,666 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3ffaa04c8a6b4019bb5e757054f20ef5/52_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,670 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,670 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:29,671 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,671 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742585_1761, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_52.json
2018-07-21T05:32:29,676 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_52.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,695 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,696 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742586_1762, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ad4436f9b0d54751b67e235081284cdb/53_descriptor.json
2018-07-21T05:32:29,702 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ad4436f9b0d54751b67e235081284cdb/53_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,705 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,705 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742587_1763, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/53_index.zip
2018-07-21T05:32:29,714 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ad4436f9b0d54751b67e235081284cdb/53_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,719 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,719 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742588_1764, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_53.json
2018-07-21T05:32:29,724 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_53.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,745 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,745 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742589_1765, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a285c087a04741c2899d9658db510ad8/54_descriptor.json
2018-07-21T05:32:29,749 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a285c087a04741c2899d9658db510ad8/54_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,752 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,752 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742590_1766, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/54_index.zip
2018-07-21T05:32:29,757 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a285c087a04741c2899d9658db510ad8/54_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,761 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742591_1767, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_54.json
2018-07-21T05:32:29,770 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_54.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:29,793 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,793 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742592_1768, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c6e09cfdb474c9fa53dc9a3c2da54d8/55_descriptor.json
2018-07-21T05:32:29,803 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c6e09cfdb474c9fa53dc9a3c2da54d8/55_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,807 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,807 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742593_1769, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/55_index.zip
2018-07-21T05:32:29,821 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6c6e09cfdb474c9fa53dc9a3c2da54d8/55_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,827 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,827 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742594_1770, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_55.json
2018-07-21T05:32:29,836 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_55.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,860 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,860 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742595_1771, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dde786bac25a4665b41cd558affa953b/56_descriptor.json
2018-07-21T05:32:29,865 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dde786bac25a4665b41cd558affa953b/56_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:29,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,868 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742596_1772, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/56_index.zip
2018-07-21T05:32:29,873 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dde786bac25a4665b41cd558affa953b/56_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,876 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,876 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742597_1773, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_56.json
2018-07-21T05:32:29,881 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_56.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,903 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742598_1774, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bd2f21b35efd4f1d95b40ecd11070a3d/57_descriptor.json
2018-07-21T05:32:29,908 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bd2f21b35efd4f1d95b40ecd11070a3d/57_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,910 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,910 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742599_1775, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/57_index.zip
2018-07-21T05:32:29,915 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bd2f21b35efd4f1d95b40ecd11070a3d/57_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,922 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,922 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742600_1776, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_57.json
2018-07-21T05:32:29,926 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_57.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:29,955 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,955 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742601_1777, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/39df27d8c2704e72bc56890040769cf9/58_descriptor.json
2018-07-21T05:32:29,960 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/39df27d8c2704e72bc56890040769cf9/58_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,962 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,962 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742602_1778, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/58_index.zip
2018-07-21T05:32:29,969 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/39df27d8c2704e72bc56890040769cf9/58_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:29,973 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:29,973 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742603_1779, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_58.json
2018-07-21T05:32:29,978 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_58.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:29,997 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:29,997 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742604_1780, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/452edf1defaa40d29142b656255a53a0/59_descriptor.json
2018-07-21T05:32:30,002 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/452edf1defaa40d29142b656255a53a0/59_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:30,005 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,005 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742605_1781, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/59_index.zip
2018-07-21T05:32:30,009 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/452edf1defaa40d29142b656255a53a0/59_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,013 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742606_1782, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_59.json
2018-07-21T05:32:30,021 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_59.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,040 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742607_1783, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfc9ae92d34944378176647cd27311c0/60_descriptor.json
2018-07-21T05:32:30,045 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfc9ae92d34944378176647cd27311c0/60_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,047 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,047 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742608_1784, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/60_index.zip
2018-07-21T05:32:30,052 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfc9ae92d34944378176647cd27311c0/60_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,056 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,056 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742609_1785, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_60.json
2018-07-21T05:32:30,062 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_60.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,082 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,082 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,083 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742610_1786, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cc3a7a2afcd94a5ba41200a7fbcaaa90/61_descriptor.json
2018-07-21T05:32:30,088 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cc3a7a2afcd94a5ba41200a7fbcaaa90/61_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,090 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,090 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742611_1787, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/61_index.zip
2018-07-21T05:32:30,095 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cc3a7a2afcd94a5ba41200a7fbcaaa90/61_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,098 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,098 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:30,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,099 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742612_1788, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_61.json
2018-07-21T05:32:30,104 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_61.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,123 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,124 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742613_1789, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e376c41cfe0443478b53f7ec60fd81fd/62_descriptor.json
2018-07-21T05:32:30,128 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e376c41cfe0443478b53f7ec60fd81fd/62_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:30,130 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,131 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742614_1790, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/62_index.zip
2018-07-21T05:32:30,135 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e376c41cfe0443478b53f7ec60fd81fd/62_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,139 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,139 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742615_1791, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_62.json
2018-07-21T05:32:30,144 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_62.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,163 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,163 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742616_1792, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c37a04c8993545afafd6e0d4f6b552c9/63_descriptor.json
2018-07-21T05:32:30,168 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c37a04c8993545afafd6e0d4f6b552c9/63_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:30,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,170 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742617_1793, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/63_index.zip
2018-07-21T05:32:30,174 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c37a04c8993545afafd6e0d4f6b552c9/63_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:30,178 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,178 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742618_1794, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_63.json
2018-07-21T05:32:30,183 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_63.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,202 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,202 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742619_1795, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ced1cc3c00974971846e1269dc2e4178/64_descriptor.json
2018-07-21T05:32:30,207 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ced1cc3c00974971846e1269dc2e4178/64_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,209 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,209 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742620_1796, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/64_index.zip
2018-07-21T05:32:30,213 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ced1cc3c00974971846e1269dc2e4178/64_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,217 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,217 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742621_1797, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_64.json
2018-07-21T05:32:30,221 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_64.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,245 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,245 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742622_1798, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8e44a309685498aa3bce90352c3cf64/65_descriptor.json
2018-07-21T05:32:30,250 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8e44a309685498aa3bce90352c3cf64/65_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
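Every file written in this section follows the same two-record lifecycle: a "BLOCK* allocate" record naming the new block, its three replicas, and the target path, then a "DIR* completeFile" record once the reducer's DFSClient closes the file. When auditing a run like this one for replica balance, it can help to pull those fields out programmatically; the sketch below does so with a regular expression inferred purely from the record shape in this log. AllocateRecordParser is a hypothetical helper, not part of Hadoop, and the sample path is shortened for readability. The selection sequence underway above continues after the sketch.

import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical log-audit helper: extract (block id, replica list, path) from
// a NameNode "BLOCK* allocate" record. The pattern is inferred from the
// records in this log; it is not a stable, documented format.
public class AllocateRecordParser {
    private static final Pattern ALLOCATE =
            Pattern.compile("BLOCK\\* allocate (blk_\\d+_\\d+), replicas=(.+?) for (\\S+)");

    public static void main(String[] args) {
        // Sample record in the shape seen above; the path is a shortened stand-in.
        String record = "2018-07-21T05:32:30,217 INFO [IPC Server handler 0 on 35925] "
                + "hdfs.StateChange: BLOCK* allocate blk_1073742621_1797, "
                + "replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 "
                + "for /tmp/example/segmentsDescriptorDir/64.json";
        Matcher m = ALLOCATE.matcher(record);
        if (m.find()) {
            String blockId = m.group(1);
            List<String> replicas = Arrays.asList(m.group(2).split(", "));
            String path = m.group(3);
            System.out.println(blockId + " -> " + replicas + " at " + path);
        }
    }
}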
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,253 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742623_1799, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/65_index.zip
2018-07-21T05:32:30,257 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8e44a309685498aa3bce90352c3cf64/65_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,261 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,261 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742624_1800, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_65.json
2018-07-21T05:32:30,268 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_65.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,288 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,288 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742625_1801, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e742cfafed3541b28034547b1fdd42b3/66_descriptor.json
2018-07-21T05:32:30,293 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e742cfafed3541b28034547b1fdd42b3/66_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,295 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,295 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742626_1802, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/66_index.zip
2018-07-21T05:32:30,301 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e742cfafed3541b28034547b1fdd42b3/66_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,304 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,305 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742627_1803, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_66.json
2018-07-21T05:32:30,308 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_66.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,328 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742628_1804, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d427043b96a140c1893b64d07898175e/67_descriptor.json
2018-07-21T05:32:30,333 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d427043b96a140c1893b64d07898175e/67_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,335 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,336 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742629_1805, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/67_index.zip
2018-07-21T05:32:30,340 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d427043b96a140c1893b64d07898175e/67_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,345 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,346 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742630_1806, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_67.json
2018-07-21T05:32:30,352 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_67.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,372 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,372 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742631_1807, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8a2aa28d07d9476a8ba45dcea51a744b/68_descriptor.json
2018-07-21T05:32:30,379 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8a2aa28d07d9476a8ba45dcea51a744b/68_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,381 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,381 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,382 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,382 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742632_1808, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/68_index.zip
2018-07-21T05:32:30,386 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8a2aa28d07d9476a8ba45dcea51a744b/68_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,393 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,393 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742633_1809, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_68.json
2018-07-21T05:32:30,399 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_68.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:30,421 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,421 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742634_1810, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/656279cda98c4b0e9dcb80208c1cc0fd/69_descriptor.json
2018-07-21T05:32:30,426 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/656279cda98c4b0e9dcb80208c1cc0fd/69_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,428 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,428 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742635_1811, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/69_index.zip
2018-07-21T05:32:30,436 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/656279cda98c4b0e9dcb80208c1cc0fd/69_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,438 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:30,441 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,441 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,441 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:30,442 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:30,442 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,442 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,442 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:30,442 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,442 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742636_1812, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_69.json
2018-07-21T05:32:30,447 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_69.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:30,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:30,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,467 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742637_1813, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/61a9ce974cfd4726abc3a1aecdf5cbe3/70_descriptor.json
2018-07-21T05:32:30,472 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/61a9ce974cfd4726abc3a1aecdf5cbe3/70_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,474 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,475 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:30,475 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,475 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,475 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,475 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:30,475 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,475 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742638_1814, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/70_index.zip
2018-07-21T05:32:30,487 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/61a9ce974cfd4726abc3a1aecdf5cbe3/70_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,491 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,491 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,491 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:30,491 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:30,491 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,491 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,492 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:30,492 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:30,492 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742639_1815, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_70.json
2018-07-21T05:32:30,496 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_70.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:30,515 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:30,515 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742640_1816, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/470356e41a4941c385f50ba9401c2905/71_descriptor.json 2018-07-21T05:32:30,520 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/470356e41a4941c385f50ba9401c2905/71_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:30,522 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:30,523 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742641_1817, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/71_index.zip 2018-07-21T05:32:30,530 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/470356e41a4941c385f50ba9401c2905/71_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:30,534 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:30,535 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742642_1818, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_71.json 2018-07-21T05:32:30,547 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_71.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:30,571 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:30,571 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742643_1819, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d05775dc715e4b29bcd9f206594046e5/72_descriptor.json 2018-07-21T05:32:30,578 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d05775dc715e4b29bcd9f206594046e5/72_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:30,580 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:30,580 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742644_1820, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/72_index.zip 2018-07-21T05:32:30,584 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d05775dc715e4b29bcd9f206594046e5/72_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:30,588 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:30,588 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742645_1821, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_72.json 2018-07-21T05:32:30,592 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_72.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:30,612 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:30,612 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742646_1822, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cc807c73d7d14e43a213d220bab3a3d0/73_descriptor.json 2018-07-21T05:32:30,616 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cc807c73d7d14e43a213d220bab3a3d0/73_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,618 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:30,619 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:30,619 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742647_1823, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/73_index.zip 2018-07-21T05:32:30,623 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cc807c73d7d14e43a213d220bab3a3d0/73_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:30,627 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:30,627 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742648_1824, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_73.json 2018-07-21T05:32:30,637 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_73.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:30,655 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:30,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:30,656 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742649_1825, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/213d9907cf4540219073625591f08875/74_descriptor.json 2018-07-21T05:32:31,062 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/213d9907cf4540219073625591f08875/74_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:31,065 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,065 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742650_1826, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/74_index.zip 2018-07-21T05:32:31,070 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/213d9907cf4540219073625591f08875/74_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:31,074 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,074 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742651_1827, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_74.json 2018-07-21T05:32:31,481 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_74.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,501 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:31,501 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,501 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:31,502 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:31,502 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,502 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,502 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:31,502 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,502 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742652_1828, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7d3d020b027489ab6bd0156efe7fcd8/75_descriptor.json 2018-07-21T05:32:31,506 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7d3d020b027489ab6bd0156efe7fcd8/75_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:31,509 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,509 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742653_1829, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/75_index.zip 2018-07-21T05:32:31,514 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7d3d020b027489ab6bd0156efe7fcd8/75_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:31,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,518 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742654_1830, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_75.json 2018-07-21T05:32:31,522 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_75.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,541 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,541 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,541 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:31,541 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,542 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,542 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,542 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:31,542 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,542 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742655_1831, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7edf5fed6bba4434a9b44be394c96625/76_descriptor.json 2018-07-21T05:32:31,549 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7edf5fed6bba4434a9b44be394c96625/76_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,555 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,555 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,556 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:31,556 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,556 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,556 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,556 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:31,556 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,556 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742656_1832, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/76_index.zip 2018-07-21T05:32:31,561 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7edf5fed6bba4434a9b44be394c96625/76_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,565 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,565 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,566 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:31,566 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,566 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,566 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,566 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:31,566 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,566 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742657_1833, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_76.json 2018-07-21T05:32:31,571 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_76.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:31,587 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,588 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742658_1834, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5c6d1203219343e9ae8b7281277e21ae/77_descriptor.json 2018-07-21T05:32:31,592 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5c6d1203219343e9ae8b7281277e21ae/77_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:31,594 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:31,594 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742659_1835, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/77_index.zip 2018-07-21T05:32:31,600 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5c6d1203219343e9ae8b7281277e21ae/77_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:31,604 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,604 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742660_1836, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_77.json 2018-07-21T05:32:31,609 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_77.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,627 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,627 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,627 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:31,627 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,628 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,628 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,628 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:31,628 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,628 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742661_1837, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ec034279e82243589b20b11aaed7a29d/78_descriptor.json 2018-07-21T05:32:31,633 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ec034279e82243589b20b11aaed7a29d/78_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:31,635 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:31,635 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742662_1838, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/78_index.zip 2018-07-21T05:32:31,640 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ec034279e82243589b20b11aaed7a29d/78_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:31,644 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,644 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742663_1839, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_78.json 2018-07-21T05:32:31,650 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_78.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:31,676 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:31,676 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742664_1840, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c3e82b5d6a94f8099085831a01ffa68/79_descriptor.json 2018-07-21T05:32:31,681 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c3e82b5d6a94f8099085831a01ffa68/79_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:31,684 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:31,684 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742665_1841, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/79_index.zip
2018-07-21T05:32:31,689 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c3e82b5d6a94f8099085831a01ffa68/79_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:31,695 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:31,696 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742666_1842, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_79.json
2018-07-21T05:32:31,705 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_79.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:31,726 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:31,726 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742667_1843, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/699187345e6c445ea652d0535eda1e4e/80_descriptor.json
2018-07-21T05:32:31,730 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/699187345e6c445ea652d0535eda1e4e/80_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:31,732 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:31,733 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:31,733 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742668_1844, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/80_index.zip
2018-07-21T05:32:31,737 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/699187345e6c445ea652d0535eda1e4e/80_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:31,741 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:31,741 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742669_1845, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_80.json
2018-07-21T05:32:32,147 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_80.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:32,167 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,167 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742670_1846, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7fcfef206504a29b57617043a25e09d/81_descriptor.json
2018-07-21T05:32:32,174 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7fcfef206504a29b57617043a25e09d/81_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,177 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,177 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742671_1847, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/81_index.zip
2018-07-21T05:32:32,189 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7fcfef206504a29b57617043a25e09d/81_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,199 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,199 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,199 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,200 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,200 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,200 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,200 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:32,200 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,200 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742672_1848, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_81.json
2018-07-21T05:32:32,205 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_81.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,226 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,226 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:32,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,227 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742673_1849, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6226e0236314d8aa9b33d707da29b88/82_descriptor.json
2018-07-21T05:32:32,238 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6226e0236314d8aa9b33d707da29b88/82_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:32,241 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,241 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742674_1850, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/82_index.zip
2018-07-21T05:32:32,247 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6226e0236314d8aa9b33d707da29b88/82_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:32,253 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,253 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742675_1851, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_82.json
2018-07-21T05:32:32,259 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_82.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:32,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,281 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742676_1852, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/789912e7c27445db9fdf24bdc5ade161/83_descriptor.json
2018-07-21T05:32:32,286 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/789912e7c27445db9fdf24bdc5ade161/83_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:32,289 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,289 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742677_1853, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/83_index.zip
2018-07-21T05:32:32,293 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/789912e7c27445db9fdf24bdc5ade161/83_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,298 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742678_1854, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_83.json
2018-07-21T05:32:32,302 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_83.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:32,322 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,322 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742679_1855, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e3a0dc9f6a8141b2bea3068b103ee101/84_descriptor.json
2018-07-21T05:32:32,326 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e3a0dc9f6a8141b2bea3068b103ee101/84_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:32,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:32,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,329 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742680_1856, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/84_index.zip
2018-07-21T05:32:32,333 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e3a0dc9f6a8141b2bea3068b103ee101/84_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:32,337 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,337 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742681_1857, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_84.json
2018-07-21T05:32:32,341 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_84.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:32,363 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,363 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742682_1858, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4f8443823e7e4bd495ca50ab7d7fd159/85_descriptor.json
2018-07-21T05:32:32,367 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4f8443823e7e4bd495ca50ab7d7fd159/85_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:32,370 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,370 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742683_1859, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/85_index.zip
2018-07-21T05:32:32,375 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4f8443823e7e4bd495ca50ab7d7fd159/85_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,379 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,380 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742684_1860, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_85.json
2018-07-21T05:32:32,386 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_85.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,410 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,411 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742685_1861, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/950471e0094f45a9a964842c5e92574f/86_descriptor.json
2018-07-21T05:32:32,415 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/950471e0094f45a9a964842c5e92574f/86_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,418 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742686_1862, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/86_index.zip
2018-07-21T05:32:32,422 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/950471e0094f45a9a964842c5e92574f/86_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,425 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,425 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:32,426 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:32,426 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742687_1863, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_86.json
2018-07-21T05:32:32,434 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_86.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:32,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,454 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742688_1864, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0463773b141845d2b2da4fc6044101da/87_descriptor.json
2018-07-21T05:32:32,458 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0463773b141845d2b2da4fc6044101da/87_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:32,460 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,460 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742689_1865, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/87_index.zip 2018-07-21T05:32:32,466 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0463773b141845d2b2da4fc6044101da/87_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,470 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,470 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742690_1866, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_87.json 2018-07-21T05:32:32,474 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_87.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:32,495 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,495 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742691_1867, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b8b248b58e245ba8c03ff13ea383800/88_descriptor.json 2018-07-21T05:32:32,500 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b8b248b58e245ba8c03ff13ea383800/88_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,503 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,503 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742692_1868, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/88_index.zip 2018-07-21T05:32:32,508 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b8b248b58e245ba8c03ff13ea383800/88_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:32,512 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,513 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742693_1869, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_88.json 2018-07-21T05:32:32,517 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_88.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,535 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,535 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742694_1870, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/946ea1cbb42b4276832222ee34d62f20/89_descriptor.json 2018-07-21T05:32:32,540 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/946ea1cbb42b4276832222ee34d62f20/89_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:32,542 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,542 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742695_1871, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/89_index.zip 2018-07-21T05:32:32,546 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/946ea1cbb42b4276832222ee34d62f20/89_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,553 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,553 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742696_1872, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_89.json 2018-07-21T05:32:32,557 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_89.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,574 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:32,574 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,575 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,575 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742697_1873, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5a4674a46233400a95000e404abc9052/90_descriptor.json 2018-07-21T05:32:32,580 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5a4674a46233400a95000e404abc9052/90_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:32,582 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,582 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742698_1874, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/90_index.zip 2018-07-21T05:32:32,589 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5a4674a46233400a95000e404abc9052/90_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:32,593 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,593 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742699_1875, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_90.json 2018-07-21T05:32:32,598 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_90.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,617 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,618 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742700_1876, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b9553fa5b6994fde9fd2ec82c641facf/91_descriptor.json 2018-07-21T05:32:32,622 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b9553fa5b6994fde9fd2ec82c641facf/91_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:32,624 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,624 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742701_1877, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/91_index.zip 2018-07-21T05:32:32,628 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b9553fa5b6994fde9fd2ec82c641facf/91_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,632 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,632 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742702_1878, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_91.json 2018-07-21T05:32:32,637 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_91.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:32,655 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,656 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742703_1879, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9bdfb2f0a06e43a088989b4b0a92b894/92_descriptor.json 2018-07-21T05:32:32,660 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9bdfb2f0a06e43a088989b4b0a92b894/92_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:32,662 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,662 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742704_1880, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/92_index.zip 2018-07-21T05:32:32,666 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9bdfb2f0a06e43a088989b4b0a92b894/92_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,669 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:32,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,670 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742705_1881, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_92.json 2018-07-21T05:32:32,674 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_92.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,692 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,692 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,693 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,693 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742706_1882, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d85d8587000442e389d49e5fa9f13e02/93_descriptor.json 2018-07-21T05:32:32,697 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d85d8587000442e389d49e5fa9f13e02/93_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,700 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,700 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742707_1883, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/93_index.zip 2018-07-21T05:32:32,704 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d85d8587000442e389d49e5fa9f13e02/93_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,708 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742708_1884, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_93.json 2018-07-21T05:32:32,712 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_93.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:32,742 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,742 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742709_1885, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c96f6656b2ea4542bfcbf3039393934b/94_descriptor.json 2018-07-21T05:32:32,747 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c96f6656b2ea4542bfcbf3039393934b/94_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:32,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,750 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742710_1886, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/94_index.zip 2018-07-21T05:32:32,755 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c96f6656b2ea4542bfcbf3039393934b/94_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:32,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,759 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742711_1887, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_94.json 2018-07-21T05:32:32,764 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_94.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,783 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,783 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742712_1888, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/da9202340dff4489ac50dd57980c04db/95_descriptor.json 2018-07-21T05:32:32,788 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/da9202340dff4489ac50dd57980c04db/95_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,790 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,790 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742713_1889, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/95_index.zip 2018-07-21T05:32:32,794 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/da9202340dff4489ac50dd57980c04db/95_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:32,798 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,798 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742714_1890, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_95.json 2018-07-21T05:32:32,803 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_95.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:32,821 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,822 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742715_1891, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/942be82b513d499c84e3021e4e04b6ba/96_descriptor.json 2018-07-21T05:32:32,826 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/942be82b513d499c84e3021e4e04b6ba/96_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:32,829 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,829 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742716_1892, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/96_index.zip 2018-07-21T05:32:32,834 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/942be82b513d499c84e3021e4e04b6ba/96_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,837 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:32,838 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,838 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742717_1893, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_96.json 2018-07-21T05:32:32,842 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_96.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,860 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,860 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,860 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:32,861 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,861 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,861 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,861 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:32,861 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,861 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742718_1894, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfbea455fd8d4c6396cb605612d790e5/97_descriptor.json 2018-07-21T05:32:32,865 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfbea455fd8d4c6396cb605612d790e5/97_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,867 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,868 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742719_1895, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/97_index.zip 2018-07-21T05:32:32,872 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfbea455fd8d4c6396cb605612d790e5/97_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:32,876 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,877 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742720_1896, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_97.json 2018-07-21T05:32:32,886 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_97.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,907 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:32,908 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:32,908 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742721_1897, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/afe93765f7b849998b2b26376daa0d97/98_descriptor.json 2018-07-21T05:32:32,913 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/afe93765f7b849998b2b26376daa0d97/98_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,915 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:32,915 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,916 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,916 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,916 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,916 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,916 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,916 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,916 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742722_1898, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/98_index.zip 2018-07-21T05:32:32,920 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/afe93765f7b849998b2b26376daa0d97/98_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,923 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,923 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,924 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,924 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,924 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,924 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,924 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:32,924 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,924 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742723_1899, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_98.json 2018-07-21T05:32:32,929 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_98.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:32,956 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,956 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742724_1900, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/70ce4092ae6e4006be1f131a7ea85ef1/99_descriptor.json 2018-07-21T05:32:32,964 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/70ce4092ae6e4006be1f131a7ea85ef1/99_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,967 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:32,967 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742725_1901, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/99_index.zip 2018-07-21T05:32:32,974 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/70ce4092ae6e4006be1f131a7ea85ef1/99_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:32,978 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:32,978 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742726_1902, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_99.json 2018-07-21T05:32:32,984 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_99.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:33,005 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:33,005 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742727_1903, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e614305b23bf45f890f1c6fb6aa06261/100_descriptor.json 2018-07-21T05:32:33,010 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e614305b23bf45f890f1c6fb6aa06261/100_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:33,012 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:33,012 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742728_1904, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/100_index.zip 2018-07-21T05:32:33,017 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e614305b23bf45f890f1c6fb6aa06261/100_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,020 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:33,021 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,021 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742729_1905, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_100.json 2018-07-21T05:32:33,026 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_100.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,050 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:33,050 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,050 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:33,050 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:33,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:33,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,051 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742730_1906, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f7ef17018d794cbc95e269536d1df7a4/101_descriptor.json 2018-07-21T05:32:33,057 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f7ef17018d794cbc95e269536d1df7a4/101_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,059 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,060 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:33,060 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:33,060 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742731_1907, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/101_index.zip 2018-07-21T05:32:33,067 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f7ef17018d794cbc95e269536d1df7a4/101_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:33,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,071 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742732_1908, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_101.json 2018-07-21T05:32:33,078 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_101.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:33,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,107 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742733_1909, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f0493d8bc6854c449490726c581e16bf/102_descriptor.json 2018-07-21T05:32:33,113 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f0493d8bc6854c449490726c581e16bf/102_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:33,117 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,118 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742734_1910, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/102_index.zip 2018-07-21T05:32:33,127 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f0493d8bc6854c449490726c581e16bf/102_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:33,131 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,131 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742735_1911, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_102.json 2018-07-21T05:32:33,138 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_102.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:33,159 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,159 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742736_1912, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48138707524f4e0bb18281df2f7b2f17/103_descriptor.json 2018-07-21T05:32:33,164 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48138707524f4e0bb18281df2f7b2f17/103_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:33,167 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,167 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742737_1913, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/103_index.zip 2018-07-21T05:32:33,177 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48138707524f4e0bb18281df2f7b2f17/103_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:33,182 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:33,182 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742738_1914, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_103.json 2018-07-21T05:32:33,189 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_103.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:33,210 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,210 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742739_1915, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1df76a5bdad04b489f10a61f9f8e45f3/104_descriptor.json
2018-07-21T05:32:33,214 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1df76a5bdad04b489f10a61f9f8e45f3/104_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,216 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,217 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,217 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742740_1916, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/104_index.zip
2018-07-21T05:32:33,224 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1df76a5bdad04b489f10a61f9f8e45f3/104_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,227 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,227 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742741_1917, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_104.json
2018-07-21T05:32:33,232 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_104.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,253 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,253 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,253 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:33,253 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,254 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,254 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,254 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:33,254 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,254 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,254 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742742_1918, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3283ef4a3a2c4794902ad708e2f9269b/105_descriptor.json
2018-07-21T05:32:33,454 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:33,665 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3283ef4a3a2c4794902ad708e2f9269b/105_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,668 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742743_1919, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/105_index.zip
2018-07-21T05:32:33,673 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3283ef4a3a2c4794902ad708e2f9269b/105_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:33,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,677 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742744_1920, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_105.json
2018-07-21T05:32:33,684 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_105.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,710 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742745_1921, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/572b7a3ec3ac492693989d5453fa0499/106_descriptor.json
2018-07-21T05:32:33,715 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/572b7a3ec3ac492693989d5453fa0499/106_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,717 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,717 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742746_1922, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/106_index.zip
2018-07-21T05:32:33,722 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/572b7a3ec3ac492693989d5453fa0499/106_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,726 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,726 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742747_1923, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_106.json
2018-07-21T05:32:33,731 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_106.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:33,750 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,750 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742748_1924, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b13263a7ae846cea9de1e6ab8cfcbbb/107_descriptor.json
2018-07-21T05:32:33,756 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b13263a7ae846cea9de1e6ab8cfcbbb/107_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,758 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,758 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742749_1925, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/107_index.zip
2018-07-21T05:32:33,763 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b13263a7ae846cea9de1e6ab8cfcbbb/107_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:33,767 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,767 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742750_1926, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_107.json
2018-07-21T05:32:33,772 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_107.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:33,796 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,796 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742751_1927, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2690e06250d8481b939729b1f8187579/108_descriptor.json
2018-07-21T05:32:33,801 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2690e06250d8481b939729b1f8187579/108_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,803 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,803 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742752_1928, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/108_index.zip
2018-07-21T05:32:33,808 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2690e06250d8481b939729b1f8187579/108_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,811 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,812 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742753_1929, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_108.json
2018-07-21T05:32:33,816 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_108.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,834 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,834 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742754_1930, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/52bee9afb0dc42e5b848a7a1b0400af6/109_descriptor.json
2018-07-21T05:32:33,839 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/52bee9afb0dc42e5b848a7a1b0400af6/109_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,845 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,845 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742755_1931, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/109_index.zip
2018-07-21T05:32:33,851 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/52bee9afb0dc42e5b848a7a1b0400af6/109_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,855 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:33,856 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,856 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742756_1932, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_109.json
2018-07-21T05:32:33,863 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_109.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:33,882 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,882 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742757_1933, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/98fcf3408efe4f3ab4c4fc1034e05bfa/110_descriptor.json
2018-07-21T05:32:33,887 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/98fcf3408efe4f3ab4c4fc1034e05bfa/110_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,889 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,889 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742758_1934, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/110_index.zip
2018-07-21T05:32:33,894 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/98fcf3408efe4f3ab4c4fc1034e05bfa/110_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,898 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,898 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742759_1935, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_110.json
2018-07-21T05:32:33,906 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_110.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,927 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,927 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742760_1936, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/881642bfc244495ea2090950bf9e2412/111_descriptor.json
2018-07-21T05:32:33,931 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/881642bfc244495ea2090950bf9e2412/111_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:33,933 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,933 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742761_1937, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/111_index.zip
2018-07-21T05:32:33,937 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/881642bfc244495ea2090950bf9e2412/111_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,941 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,941 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742762_1938, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_111.json
2018-07-21T05:32:33,945 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_111.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,973 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:33,973 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742763_1939, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cac56e27f0594b2abfe3d5e9b04c7d72/112_descriptor.json
2018-07-21T05:32:33,979 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cac56e27f0594b2abfe3d5e9b04c7d72/112_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:33,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:33,982 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742764_1940, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/112_index.zip
2018-07-21T05:32:33,995 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cac56e27f0594b2abfe3d5e9b04c7d72/112_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:34,000 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,000 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742765_1941, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_112.json
2018-07-21T05:32:34,016 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_112.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:34,036 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,036 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742766_1942, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f363dc6b5de64426aa47b520858cf06f/113_descriptor.json
2018-07-21T05:32:34,042 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f363dc6b5de64426aa47b520858cf06f/113_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:34,044 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,044 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742767_1943, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/113_index.zip
2018-07-21T05:32:34,049 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f363dc6b5de64426aa47b520858cf06f/113_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,053 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,053 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742768_1944, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_113.json
2018-07-21T05:32:34,060 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_113.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,082 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742769_1945, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b93841f0d8e64824b7aa81aeec58696e/114_descriptor.json
2018-07-21T05:32:34,090 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b93841f0d8e64824b7aa81aeec58696e/114_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:34,093 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,093 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742770_1946, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/114_index.zip
2018-07-21T05:32:34,103 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b93841f0d8e64824b7aa81aeec58696e/114_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:34,107 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,107 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742771_1947, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_114.json
2018-07-21T05:32:34,113 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_114.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,135 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,135 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742772_1948, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/baa334d07a364fbabaf8b1178ea40b8e/115_descriptor.json
2018-07-21T05:32:34,146 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/baa334d07a364fbabaf8b1178ea40b8e/115_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:34,148 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,148 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742773_1949, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/115_index.zip
2018-07-21T05:32:34,154 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/baa334d07a364fbabaf8b1178ea40b8e/115_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:34,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,160 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742774_1950, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_115.json
2018-07-21T05:32:34,167 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_115.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,193 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:34,194 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,194 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742775_1951, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9986b73073354be78d9d55c8018bb742/116_descriptor.json
2018-07-21T05:32:34,207 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9986b73073354be78d9d55c8018bb742/116_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:34,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,210 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,210 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,210 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:34,210 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,210 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742776_1952, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/116_index.zip
2018-07-21T05:32:34,219 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9986b73073354be78d9d55c8018bb742/116_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:34,224 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,224 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742777_1953, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_116.json
2018-07-21T05:32:34,233 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_116.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,252 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,252 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742778_1954, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b30ad399319f40d1a71d40e6c2a4aa0c/117_descriptor.json
2018-07-21T05:32:34,257 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b30ad399319f40d1a71d40e6c2a4aa0c/117_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,261 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:34,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,262 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742779_1955, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/117_index.zip
2018-07-21T05:32:34,271 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b30ad399319f40d1a71d40e6c2a4aa0c/117_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:34,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,276 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742780_1956, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_117.json
2018-07-21T05:32:34,281 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_117.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:34,304 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,304 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742781_1957, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a95ac27729f249ccbf102ddb8e3fe19d/118_descriptor.json
2018-07-21T05:32:34,310 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a95ac27729f249ccbf102ddb8e3fe19d/118_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:34,313 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,313 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742782_1958, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/118_index.zip
2018-07-21T05:32:34,319 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a95ac27729f249ccbf102ddb8e3fe19d/118_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:34,328 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,328 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742783_1959, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_118.json
2018-07-21T05:32:34,341 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_118.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:34,364 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,364 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742784_1960, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/204b3be495fc43e8aa189aa735e30955/119_descriptor.json
2018-07-21T05:32:34,370 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/204b3be495fc43e8aa189aa735e30955/119_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,373 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,374 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742785_1961, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/119_index.zip
2018-07-21T05:32:34,385 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/204b3be495fc43e8aa189aa735e30955/119_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,389 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:34,390 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:34,390 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742786_1962, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_119.json
2018-07-21T05:32:34,399 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_119.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:34,424 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:34,424 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742787_1963, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48fcc2088f1a430cb7c1cc6eaf2ce645/120_descriptor.json
2018-07-21T05:32:34,429 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48fcc2088f1a430cb7c1cc6eaf2ce645/120_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:34,431 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:34,431 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742788_1964, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/120_index.zip 2018-07-21T05:32:34,435 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48fcc2088f1a430cb7c1cc6eaf2ce645/120_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:34,439 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:34,439 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742789_1965, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_120.json 2018-07-21T05:32:34,443 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_120.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:34,468 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,469 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742790_1966, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/52fea51fb75344678b53f5fca6911f88/121_descriptor.json 2018-07-21T05:32:34,477 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/52fea51fb75344678b53f5fca6911f88/121_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:34,480 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,480 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742791_1967, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/121_index.zip 2018-07-21T05:32:34,486 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/52fea51fb75344678b53f5fca6911f88/121_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:34,490 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,491 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742792_1968, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_121.json 2018-07-21T05:32:34,499 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_121.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:34,518 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,518 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742793_1969, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0f4d3c5280d543e0a41c5ad4033d0f18/122_descriptor.json 2018-07-21T05:32:34,532 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0f4d3c5280d543e0a41c5ad4033d0f18/122_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:34,535 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,535 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742794_1970, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/122_index.zip 2018-07-21T05:32:34,541 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0f4d3c5280d543e0a41c5ad4033d0f18/122_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:34,544 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:34,545 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742795_1971, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_122.json 2018-07-21T05:32:34,554 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_122.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:34,587 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:34,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:34,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,588 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742796_1972, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/87556800e0554314a67e1e9f8acb0aa4/123_descriptor.json 2018-07-21T05:32:34,595 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/87556800e0554314a67e1e9f8acb0aa4/123_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:34,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:34,604 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,604 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742797_1973, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/123_index.zip 2018-07-21T05:32:34,616 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/87556800e0554314a67e1e9f8acb0aa4/123_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:34,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:34,622 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742798_1974, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_123.json 2018-07-21T05:32:34,635 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_123.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:34,653 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,653 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
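Every staging file in this trace follows the same client-side lifecycle visible in the INFO lines: the DFSClient (here DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30, i.e. the query's reduce task) asks the NameNode to allocate a block ("BLOCK* allocate ..."), streams the data to the chosen replicas, and on stream close the NameNode seals the file ("DIR* completeFile: ... is closed by ..."). A minimal Java sketch of a write that would produce one such allocate/completeFile pair is below; the path and configuration are illustrative placeholders, not values from this run.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StagingWriteExample {
    public static void main(String[] args) throws Exception {
        // Assumes fs.defaultFS in the Configuration points at the mini-cluster.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path descriptor = new Path("/tmp/druidStagingDir/example_descriptor.json");
        try (FSDataOutputStream out = fs.create(descriptor, true)) {
            // First write triggers block allocation on the NameNode
            // ("BLOCK* allocate blk_..., replicas=...").
            out.writeBytes("{}");
        } // close() completes the file ("DIR* completeFile: ... is closed by ...").
    }
}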
2018-07-21T05:32:34,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:34,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:34,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:34,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:34,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:34,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:34,654 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742799_1975, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d03b4b47a9d146bba40b048c5c06e928/124_descriptor.json 2018-07-21T05:32:35,063 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d03b4b47a9d146bba40b048c5c06e928/124_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:35,066 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,066 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742800_1976, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/124_index.zip 2018-07-21T05:32:35,071 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d03b4b47a9d146bba40b048c5c06e928/124_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:35,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,075 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742801_1977, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_124.json 2018-07-21T05:32:35,079 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_124.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:35,097 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,097 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742802_1978, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/74e2664474a94fb7b921d17188d8b60f/125_descriptor.json 2018-07-21T05:32:35,505 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/74e2664474a94fb7b921d17188d8b60f/125_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:35,508 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:35,508 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742803_1979, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/125_index.zip 2018-07-21T05:32:35,513 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/74e2664474a94fb7b921d17188d8b60f/125_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:35,517 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:35,518 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742804_1980, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_125.json 2018-07-21T05:32:35,522 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_125.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:35,539 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:35,539 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742805_1981, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d796b4f1af914402919bd82189b8199b/126_descriptor.json 2018-07-21T05:32:35,543 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d796b4f1af914402919bd82189b8199b/126_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,545 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,545 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,545 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:35,545 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,545 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:35,546 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,546 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742806_1982, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/126_index.zip 2018-07-21T05:32:35,556 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d796b4f1af914402919bd82189b8199b/126_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:35,560 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:35,561 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742807_1983, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_126.json 2018-07-21T05:32:35,569 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_126.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:35,590 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:35,590 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742808_1984, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a736ffbb77184a70b7d753f324365a37/127_descriptor.json 2018-07-21T05:32:35,594 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a736ffbb77184a70b7d753f324365a37/127_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:35,597 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:35,597 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742809_1985, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/127_index.zip 2018-07-21T05:32:35,601 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a736ffbb77184a70b7d753f324365a37/127_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:35,605 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:35,605 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742810_1986, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_127.json 2018-07-21T05:32:35,614 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_127.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:35,647 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:35,648 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:35,648 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742811_1987, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/55cedad7784947ca910d7b4d5a8930e9/128_descriptor.json 2018-07-21T05:32:36,056 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/55cedad7784947ca910d7b4d5a8930e9/128_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:36,059 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,059 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742812_1988, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/128_index.zip
2018-07-21T05:32:36,064 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/55cedad7784947ca910d7b4d5a8930e9/128_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:36,068 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,068 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742813_1989, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_128.json
2018-07-21T05:32:36,076 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_128.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,096 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,096 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742814_1990, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d65f722c98024de7a7fea466b19aab70/129_descriptor.json
2018-07-21T05:32:36,101 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d65f722c98024de7a7fea466b19aab70/129_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:36,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,103 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742815_1991, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/129_index.zip
2018-07-21T05:32:36,111 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d65f722c98024de7a7fea466b19aab70/129_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:36,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:36,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,115 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742816_1992, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_129.json
2018-07-21T05:32:36,119 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_129.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,145 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742817_1993, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5876cff77c0f4401b9a069e9cb9a664c/130_descriptor.json
2018-07-21T05:32:36,149 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5876cff77c0f4401b9a069e9cb9a664c/130_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,152 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,152 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742818_1994, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/130_index.zip
2018-07-21T05:32:36,156 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5876cff77c0f4401b9a069e9cb9a664c/130_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,160 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,160 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742819_1995, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_130.json
2018-07-21T05:32:36,164 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_130.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:36,186 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,186 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742820_1996, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91eebcbf69bd4169813ea7ec123f76c8/131_descriptor.json
2018-07-21T05:32:36,191 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91eebcbf69bd4169813ea7ec123f76c8/131_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,193 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,193 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742821_1997, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/131_index.zip
2018-07-21T05:32:36,198 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91eebcbf69bd4169813ea7ec123f76c8/131_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:36,202 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,202 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742822_1998, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_131.json
2018-07-21T05:32:36,206 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_131.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,225 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742823_1999, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/81f05bcac52840da962acc2f725bf3b2/132_descriptor.json
2018-07-21T05:32:36,230 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/81f05bcac52840da962acc2f725bf3b2/132_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:36,232 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,232 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742824_2000, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/132_index.zip
2018-07-21T05:32:36,237 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/81f05bcac52840da962acc2f725bf3b2/132_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,241 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,241 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742825_2001, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_132.json
2018-07-21T05:32:36,245 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_132.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,263 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:36,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,264 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742826_2002, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6a5f4476a4f84e5aa2e61940f7cfc9e4/133_descriptor.json
2018-07-21T05:32:36,268 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6a5f4476a4f84e5aa2e61940f7cfc9e4/133_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,270 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,270 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742827_2003, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/133_index.zip
2018-07-21T05:32:36,274 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6a5f4476a4f84e5aa2e61940f7cfc9e4/133_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,277 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,277 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742828_2004, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_133.json
2018-07-21T05:32:36,282 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_133.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,303 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,303 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742829_2005, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bc9cd2b1bad6473fa9d086be2c9f81df/134_descriptor.json
2018-07-21T05:32:36,308 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bc9cd2b1bad6473fa9d086be2c9f81df/134_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:36,310 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,310 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742830_2006, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/134_index.zip
2018-07-21T05:32:36,314 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bc9cd2b1bad6473fa9d086be2c9f81df/134_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:36,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,318 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742831_2007, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_134.json
2018-07-21T05:32:36,323 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_134.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
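[Editor's note] Each BLOCK* allocate / DIR* completeFile pair in this section is one small staging file (a *_descriptor.json or *_index.zip) being written and closed by the reducer's DFSClient, with the NameNode placing three replicas per block. A hedged client-side sketch of that exchange using the public org.apache.hadoop.fs.FileSystem API; the path and payload are hypothetical stand-ins, not the Druid storage handler's actual code:

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class StagingFileWriter {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration(); // picks up fs.defaultFS from core-site.xml
            try (FileSystem fs = FileSystem.get(conf)) {
                // Hypothetical short path; the test run uses a much longer .staging-* directory.
                Path descriptor = new Path("/tmp/druidStagingDir/segmentsDescriptorDir/part_134.json");
                // create(path, overwrite, bufferSize, replication, blockSize):
                // the NameNode allocates blocks with replication 3, producing the
                // "BLOCK* allocate ... replicas=..." lines seen in the log.
                try (FSDataOutputStream out =
                             fs.create(descriptor, true, 4096, (short) 3, 128L * 1024 * 1024)) {
                    out.write("{\"partition\":134}".getBytes(StandardCharsets.UTF_8));
                } // close() triggers the "DIR* completeFile ... is closed by DFSClient_..." state change
            }
        }
    }

Because these descriptors and zips are tiny relative to the 128 MB block size, each file costs exactly one block allocation, which is why the log alternates strictly between an allocate and a completeFile per file.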
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:36,342 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,342 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742832_2008, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e78ad89d277449ead0b74b7c3242b94/135_descriptor.json
2018-07-21T05:32:36,346 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e78ad89d277449ead0b74b7c3242b94/135_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:36,348 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:36,349 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742833_2009, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/135_index.zip
2018-07-21T05:32:36,353 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e78ad89d277449ead0b74b7c3242b94/135_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:36,356 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,356 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742834_2010, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_135.json
2018-07-21T05:32:36,360 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_135.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,385 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:36,386 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:36,386 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742835_2011, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/14ade14214ea4ffab24bd0d045ce193a/136_descriptor.json
2018-07-21T05:32:36,470 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:36,791 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/14ade14214ea4ffab24bd0d045ce193a/136_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:36,798 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,798 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742836_2012, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/136_index.zip 2018-07-21T05:32:36,804 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/14ade14214ea4ffab24bd0d045ce193a/136_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:36,808 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:36,808 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742837_2013, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_136.json 2018-07-21T05:32:36,819 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_136.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:36,841 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:36,841 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742838_2014, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6fa26359c1644ddbad662771e4b7f40/137_descriptor.json 2018-07-21T05:32:36,848 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6fa26359c1644ddbad662771e4b7f40/137_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:36,850 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,850 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742839_2015, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/137_index.zip 2018-07-21T05:32:36,857 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6fa26359c1644ddbad662771e4b7f40/137_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:36,862 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:36,863 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742840_2016, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_137.json 2018-07-21T05:32:36,870 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_137.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:36,896 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:36,897 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742841_2017, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/43f313b128f944028b6f59fc2934486a/138_descriptor.json 2018-07-21T05:32:36,901 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/43f313b128f944028b6f59fc2934486a/138_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:36,907 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:36,907 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742842_2018, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/138_index.zip 2018-07-21T05:32:36,912 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/43f313b128f944028b6f59fc2934486a/138_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:36,917 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:36,917 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742843_2019, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_138.json 2018-07-21T05:32:36,924 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_138.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,951 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,951 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:36,952 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:36,952 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:36,952 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,952 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,952 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:36,952 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:36,952 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742844_2020, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ade9cda9150645869c88b7241c5ae2c5/139_descriptor.json 2018-07-21T05:32:36,966 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ade9cda9150645869c88b7241c5ae2c5/139_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,968 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,968 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,969 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:36,969 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:36,969 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,969 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:36,969 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:36,969 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:36,969 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742845_2021, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/139_index.zip 2018-07-21T05:32:36,976 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ade9cda9150645869c88b7241c5ae2c5/139_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:36,981 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,981 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:36,981 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:36,981 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:36,981 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:36,981 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:36,982 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:36,982 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:36,982 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742846_2022, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_139.json 2018-07-21T05:32:36,995 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_139.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:37,030 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,030 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742847_2023, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ed96741b650b4809a082c249f8d39948/140_descriptor.json 2018-07-21T05:32:37,038 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ed96741b650b4809a082c249f8d39948/140_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,041 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:37,041 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742848_2024, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/140_index.zip 2018-07-21T05:32:37,061 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ed96741b650b4809a082c249f8d39948/140_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:37,065 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:37,066 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742849_2025, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_140.json 2018-07-21T05:32:37,079 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_140.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:37,098 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:37,098 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742850_2026, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2d36237149d84eaf8d84af550f37b60f/141_descriptor.json 2018-07-21T05:32:37,104 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2d36237149d84eaf8d84af550f37b60f/141_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:37,107 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:37,107 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742851_2027, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/141_index.zip 2018-07-21T05:32:37,113 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2d36237149d84eaf8d84af550f37b60f/141_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:37,117 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,117 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742852_2028, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_141.json 2018-07-21T05:32:37,522 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_141.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:37,543 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,543 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742853_2029, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0ba7d0f8bbc9498194fd986516d1d685/142_descriptor.json 2018-07-21T05:32:37,551 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0ba7d0f8bbc9498194fd986516d1d685/142_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,556 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:37,557 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:37,557 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742854_2030, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/142_index.zip 2018-07-21T05:32:37,569 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0ba7d0f8bbc9498194fd986516d1d685/142_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:37,573 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,574 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742855_2031, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_142.json 2018-07-21T05:32:37,579 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_142.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:37,600 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:37,600 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742856_2032, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/270424b9eb5840239b01fd305b76ca92/143_descriptor.json 2018-07-21T05:32:37,610 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/270424b9eb5840239b01fd305b76ca92/143_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:37,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:37,613 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742857_2033, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/143_index.zip 2018-07-21T05:32:37,621 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/270424b9eb5840239b01fd305b76ca92/143_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:37,624 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:37,624 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742858_2034, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_143.json 2018-07-21T05:32:37,629 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_143.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,651 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,651 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:37,652 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,652 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742859_2035, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9eba673334454f60baef9c39da962d6c/144_descriptor.json 2018-07-21T05:32:37,661 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9eba673334454f60baef9c39da962d6c/144_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,666 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,666 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:37,667 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,667 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742860_2036, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/144_index.zip 2018-07-21T05:32:37,671 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9eba673334454f60baef9c39da962d6c/144_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,675 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,675 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742861_2037, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_144.json 2018-07-21T05:32:37,680 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_144.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:37,705 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,705 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742862_2038, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a35f7b16eef747f0881bb67d3eca09c7/145_descriptor.json
2018-07-21T05:32:37,710 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a35f7b16eef747f0881bb67d3eca09c7/145_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:37,712 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,712 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742863_2039, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/145_index.zip
2018-07-21T05:32:37,721 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a35f7b16eef747f0881bb67d3eca09c7/145_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,724 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:37,725 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,725 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742864_2040, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_145.json
2018-07-21T05:32:37,729 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_145.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:37,749 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,749 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742865_2041, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/88ba35e7044949c28897b9c1d04e640a/146_descriptor.json
2018-07-21T05:32:37,753 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/88ba35e7044949c28897b9c1d04e640a/146_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:37,755 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,755 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742866_2042, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/146_index.zip
2018-07-21T05:32:37,762 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/88ba35e7044949c28897b9c1d04e640a/146_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:37,766 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,766 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742867_2043, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_146.json
2018-07-21T05:32:37,771 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_146.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:37,798 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,798 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742868_2044, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/895541bb085c459a95175299062b47b6/147_descriptor.json
2018-07-21T05:32:37,816 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/895541bb085c459a95175299062b47b6/147_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,818 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,818 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,818 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:37,818 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,818 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,818 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:37,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:37,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,819 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742869_2045, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/147_index.zip
2018-07-21T05:32:37,832 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/895541bb085c459a95175299062b47b6/147_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:37,837 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,838 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742870_2046, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_147.json
2018-07-21T05:32:37,855 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_147.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:37,875 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,875 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742871_2047, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ada0e11c99e6429c86faa6c1fd02392c/148_descriptor.json
2018-07-21T05:32:37,882 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ada0e11c99e6429c86faa6c1fd02392c/148_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:37,884 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,885 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742872_2048, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/148_index.zip
2018-07-21T05:32:37,889 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ada0e11c99e6429c86faa6c1fd02392c/148_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:37,893 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,894 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742873_2049, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_148.json
2018-07-21T05:32:37,898 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_148.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:37,915 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,915 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742874_2050, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/130119a806b949a38de77473fffea917/149_descriptor.json
2018-07-21T05:32:37,920 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/130119a806b949a38de77473fffea917/149_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:37,922 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,922 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742875_2051, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/149_index.zip
2018-07-21T05:32:37,926 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/130119a806b949a38de77473fffea917/149_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:37,929 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,929 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742876_2052, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_149.json
2018-07-21T05:32:37,934 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_149.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:37,954 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,954 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742877_2053, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48750c37b07c47b69c64a227d072b735/150_descriptor.json
2018-07-21T05:32:37,959 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48750c37b07c47b69c64a227d072b735/150_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:37,961 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,961 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742878_2054, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/150_index.zip
2018-07-21T05:32:37,968 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/48750c37b07c47b69c64a227d072b735/150_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:37,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:37,972 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742879_2055, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_150.json
2018-07-21T05:32:37,977 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_150.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:37,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:37,995 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742880_2056, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3bafc959b33d4b72822bcc0e25e47534/151_descriptor.json
2018-07-21T05:32:37,999 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3bafc959b33d4b72822bcc0e25e47534/151_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,001 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,002 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,002 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742881_2057, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/151_index.zip
2018-07-21T05:32:38,005 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3bafc959b33d4b72822bcc0e25e47534/151_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:38,011 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,011 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742882_2058, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_151.json
2018-07-21T05:32:38,015 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_151.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:38,032 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,032 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742883_2059, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/427ac788f7714bc2831d63a60b6ba56e/152_descriptor.json
2018-07-21T05:32:38,036 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/427ac788f7714bc2831d63a60b6ba56e/152_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:38,038 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,039 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742884_2060, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/152_index.zip
2018-07-21T05:32:38,043 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/427ac788f7714bc2831d63a60b6ba56e/152_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,046 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,046 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,046 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,046 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:38,047 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,047 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742885_2061, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_152.json
2018-07-21T05:32:38,053 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_152.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,077 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,077 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742886_2062, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/177fe106513849b4ad7236130d4c866b/153_descriptor.json
2018-07-21T05:32:38,088 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/177fe106513849b4ad7236130d4c866b/153_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:38,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,092 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742887_2063, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/153_index.zip
2018-07-21T05:32:38,098 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/177fe106513849b4ad7236130d4c866b/153_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:38,109 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,109 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742888_2064, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_153.json
2018-07-21T05:32:38,114 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_153.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,135 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,135 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742889_2065, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/71e32183fd904a0ebccf28ef778f022f/154_descriptor.json
2018-07-21T05:32:38,140 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/71e32183fd904a0ebccf28ef778f022f/154_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:38,153 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,153 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742890_2066, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/154_index.zip
2018-07-21T05:32:38,163 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/71e32183fd904a0ebccf28ef778f022f/154_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:38,172 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,172 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742891_2067, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_154.json
2018-07-21T05:32:38,180 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_154.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:38,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,213 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742892_2068, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3407927a965c439cbad99887ea812e2e/155_descriptor.json
2018-07-21T05:32:38,218 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3407927a965c439cbad99887ea812e2e/155_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:38,221 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,221 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742893_2069, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/155_index.zip
2018-07-21T05:32:38,226 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3407927a965c439cbad99887ea812e2e/155_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:38,235 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,235 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742894_2070, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_155.json
2018-07-21T05:32:38,269 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_155.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,307 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,307 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742895_2071, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/13608f94e81a435b87a752dfd78e40e3/156_descriptor.json
2018-07-21T05:32:38,327 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/13608f94e81a435b87a752dfd78e40e3/156_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,332 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,333 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,333 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742896_2072, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/156_index.zip
2018-07-21T05:32:38,340 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/13608f94e81a435b87a752dfd78e40e3/156_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:38,345 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,345 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742897_2073, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_156.json
2018-07-21T05:32:38,352 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_156.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,371 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742898_2074, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/473ce1f0ce534d9e8ebc278767437463/157_descriptor.json
2018-07-21T05:32:38,777 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/473ce1f0ce534d9e8ebc278767437463/157_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,779 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,779 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,779 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,779 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,779 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,779 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,780 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:38,780 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,780 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742899_2075, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/157_index.zip
2018-07-21T05:32:38,785 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/473ce1f0ce534d9e8ebc278767437463/157_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:38,789 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,789 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742900_2076, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_157.json
2018-07-21T05:32:38,793 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_157.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:38,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,813 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742901_2077, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/42717c52093944c19e2834fefb7ae66c/158_descriptor.json
2018-07-21T05:32:38,817 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/42717c52093944c19e2834fefb7ae66c/158_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,819 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,820 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742902_2078, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/158_index.zip
2018-07-21T05:32:38,825 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/42717c52093944c19e2834fefb7ae66c/158_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:38,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,828 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742903_2079, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_158.json
2018-07-21T05:32:38,833 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_158.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,855 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,856 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742904_2080, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3710560060c74c01914db65ae43e8217/159_descriptor.json
2018-07-21T05:32:38,874 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3710560060c74c01914db65ae43e8217/159_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,876 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,876 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742905_2081, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/159_index.zip
2018-07-21T05:32:38,884 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3710560060c74c01914db65ae43e8217/159_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,887 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,887 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,888 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,888 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,888 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,888 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,888 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:38,888 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,888 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742906_2082, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_159.json
2018-07-21T05:32:38,906 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_159.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:38,933 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,933 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742907_2083, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d24f9b9ab841448d81842be0a4ed1e10/160_descriptor.json
2018-07-21T05:32:38,938 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d24f9b9ab841448d81842be0a4ed1e10/160_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,940 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742908_2084, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/160_index.zip
2018-07-21T05:32:38,946 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d24f9b9ab841448d81842be0a4ed1e10/160_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,949 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,949 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,949 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:38,950 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:38,950 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742909_2085, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_160.json
2018-07-21T05:32:38,958 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_160.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:38,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:38,986 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742910_2086, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9d4c7927db24410dbc1daf82102bf34e/161_descriptor.json
2018-07-21T05:32:38,993 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9d4c7927db24410dbc1daf82102bf34e/161_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:38,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:38,996 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742911_2087, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/161_index.zip
2018-07-21T05:32:39,001 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9d4c7927db24410dbc1daf82102bf34e/161_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,012 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,012 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742912_2088, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_161.json
2018-07-21T05:32:39,018 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_161.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:39,040 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,040 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742913_2089, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e0aa526fdb94c29a66e6dca8a68e143/162_descriptor.json
2018-07-21T05:32:39,046 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e0aa526fdb94c29a66e6dca8a68e143/162_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:39,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,049 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742914_2090, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/162_index.zip
2018-07-21T05:32:39,060 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e0aa526fdb94c29a66e6dca8a68e143/162_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:39,065 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,065 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742915_2091, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_162.json
2018-07-21T05:32:39,070 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_162.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,100 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,100 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742916_2092, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/526a20b8461b452d9f7c90a4042a30d7/163_descriptor.json
2018-07-21T05:32:39,107 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/526a20b8461b452d9f7c90a4042a30d7/163_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,109 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:39,110 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,110 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742917_2093, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/163_index.zip
2018-07-21T05:32:39,120 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/526a20b8461b452d9f7c90a4042a30d7/163_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,125 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,125 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,125 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:39,126 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,126 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742918_2094, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_163.json
2018-07-21T05:32:39,133 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_163.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,158 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,158 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742919_2095, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/702c7e0e1d5445cf8f696ec62258e76f/164_descriptor.json
2018-07-21T05:32:39,178 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/702c7e0e1d5445cf8f696ec62258e76f/164_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,183 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,184 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,184 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742920_2096, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/164_index.zip
2018-07-21T05:32:39,200 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/702c7e0e1d5445cf8f696ec62258e76f/164_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,214 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,214 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742921_2097, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_164.json
2018-07-21T05:32:39,219 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_164.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,239 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,239 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742922_2098, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/22d2d873300d4a8d997c49e49024d9d4/165_descriptor.json
2018-07-21T05:32:39,249 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/22d2d873300d4a8d997c49e49024d9d4/165_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,256 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,256 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742923_2099, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/165_index.zip
2018-07-21T05:32:39,263 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/22d2d873300d4a8d997c49e49024d9d4/165_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,268 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742924_2100, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_165.json
2018-07-21T05:32:39,274 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_165.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,296 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,296 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:39,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,297 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742925_2101, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b8125318008e438d87b5581f786f3ada/166_descriptor.json
2018-07-21T05:32:39,309 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b8125318008e438d87b5581f786f3ada/166_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,311 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,311 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742926_2102, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/166_index.zip
2018-07-21T05:32:39,325 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b8125318008e438d87b5581f786f3ada/166_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:39,329 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,329 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742927_2103, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_166.json
2018-07-21T05:32:39,341 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_166.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,363 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,363 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:39,363 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:39,363 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,363 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,363 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742928_2104, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2f5d50f18a86485796c9f94884e9d19d/167_descriptor.json
2018-07-21T05:32:39,367 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2f5d50f18a86485796c9f94884e9d19d/167_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:39,370 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,370 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742929_2105, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/167_index.zip
2018-07-21T05:32:39,484 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:39,775 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2f5d50f18a86485796c9f94884e9d19d/167_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:39,779 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,779 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742930_2106, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_167.json
2018-07-21T05:32:39,784 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_167.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:39,807 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,807 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742931_2107, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d56b1bc398724fa6adf00491c4c6331a/168_descriptor.json
2018-07-21T05:32:39,831 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d56b1bc398724fa6adf00491c4c6331a/168_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:39,832 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,832 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742932_2108, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/168_index.zip
2018-07-21T05:32:39,838 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d56b1bc398724fa6adf00491c4c6331a/168_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,842 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,842 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742933_2109, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_168.json
2018-07-21T05:32:39,846 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_168.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,866 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:39,867 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:39,867 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742934_2110, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a14b0e2aa4644e09a9b4700ac7f9711/169_descriptor.json
2018-07-21T05:32:39,880 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a14b0e2aa4644e09a9b4700ac7f9711/169_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:39,884 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,884 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742935_2111, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/169_index.zip
2018-07-21T05:32:39,889 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a14b0e2aa4644e09a9b4700ac7f9711/169_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,896 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,896 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742936_2112, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_169.json
2018-07-21T05:32:39,908 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_169.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,934 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,934 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,935 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:39,935 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,935 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,935 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,935 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:39,935 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,935 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742937_2113, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6435536f94e48c29a60a8ad77c59aab/170_descriptor.json
2018-07-21T05:32:39,940 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6435536f94e48c29a60a8ad77c59aab/170_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:39,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:39,943 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742938_2114, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/170_index.zip
2018-07-21T05:32:39,949 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a6435536f94e48c29a60a8ad77c59aab/170_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:39,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:39,954 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742939_2115, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_170.json 2018-07-21T05:32:39,965 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_170.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:39,986 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:39,986 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742940_2116, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b330de203c424d129963fedbd175e47b/171_descriptor.json 2018-07-21T05:32:40,007 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b330de203c424d129963fedbd175e47b/171_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:40,010 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,010 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742941_2117, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/171_index.zip 2018-07-21T05:32:40,016 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b330de203c424d129963fedbd175e47b/171_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,020 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:40,021 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,021 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742942_2118, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_171.json 2018-07-21T05:32:40,026 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_171.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:40,058 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,058 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742943_2119, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6a6b48b8a1af485fad7c226730d2bf9f/172_descriptor.json 2018-07-21T05:32:40,063 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6a6b48b8a1af485fad7c226730d2bf9f/172_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:40,066 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,067 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742944_2120, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/172_index.zip 2018-07-21T05:32:40,076 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6a6b48b8a1af485fad7c226730d2bf9f/172_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,080 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,080 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742945_2121, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_172.json 2018-07-21T05:32:40,085 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_172.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:40,105 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,105 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742946_2122, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/70c75a85c70540bea74058977eea23f9/173_descriptor.json 2018-07-21T05:32:40,110 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/70c75a85c70540bea74058977eea23f9/173_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:40,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,114 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742947_2123, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/173_index.zip 2018-07-21T05:32:40,126 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/70c75a85c70540bea74058977eea23f9/173_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:40,130 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,130 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742948_2124, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_173.json 2018-07-21T05:32:40,140 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_173.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,166 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,166 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742949_2125, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de4d723c35704314aef5da64809920cd/174_descriptor.json 2018-07-21T05:32:40,179 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de4d723c35704314aef5da64809920cd/174_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,182 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:40,182 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:40,183 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,183 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742950_2126, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/174_index.zip 2018-07-21T05:32:40,190 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de4d723c35704314aef5da64809920cd/174_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,194 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,194 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742951_2127, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_174.json 2018-07-21T05:32:40,208 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_174.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,237 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,237 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:40,238 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,238 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742952_2128, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d052f96272d0434fa5b2f3e60317a535/175_descriptor.json 2018-07-21T05:32:40,246 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d052f96272d0434fa5b2f3e60317a535/175_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:40,249 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,249 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742953_2129, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/175_index.zip 2018-07-21T05:32:40,259 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d052f96272d0434fa5b2f3e60317a535/175_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:40,262 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,263 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,263 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,263 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742954_2130, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_175.json 2018-07-21T05:32:40,275 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_175.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:40,306 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:40,306 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742955_2131, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b7952e05add1492ca9346f22b02c14d5/176_descriptor.json 2018-07-21T05:32:40,318 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b7952e05add1492ca9346f22b02c14d5/176_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:40,325 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,325 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742956_2132, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/176_index.zip 2018-07-21T05:32:40,334 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b7952e05add1492ca9346f22b02c14d5/176_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:40,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,343 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742957_2133, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_176.json 2018-07-21T05:32:40,360 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_176.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:40,388 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,388 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742958_2134, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8d1930f4b39a41cea88b4c9a33b69926/177_descriptor.json 2018-07-21T05:32:40,393 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8d1930f4b39a41cea88b4c9a33b69926/177_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,403 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,403 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,403 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,403 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,403 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:40,403 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,403 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742959_2135, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/177_index.zip 2018-07-21T05:32:40,411 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8d1930f4b39a41cea88b4c9a33b69926/177_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,415 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,415 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,415 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,415 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,415 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,415 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,416 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:40,416 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,416 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,416 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:40,416 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,416 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742960_2136, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_177.json 2018-07-21T05:32:40,421 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_177.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:40,445 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,445 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742961_2137, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cb77bb938e4841e983e203ba46c49280/178_descriptor.json 2018-07-21T05:32:40,450 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cb77bb938e4841e983e203ba46c49280/178_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,454 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:40,455 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,455 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742962_2138, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/178_index.zip 2018-07-21T05:32:40,460 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cb77bb938e4841e983e203ba46c49280/178_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,463 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:40,463 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,464 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:40,464 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,464 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,464 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,464 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:40,464 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,464 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742963_2139, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_178.json 2018-07-21T05:32:40,868 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_178.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:40,894 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,894 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742964_2140, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b56310f74190456a9c5806f9ba06cf66/179_descriptor.json 2018-07-21T05:32:40,900 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b56310f74190456a9c5806f9ba06cf66/179_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:40,902 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,903 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742965_2141, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/179_index.zip 2018-07-21T05:32:40,911 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b56310f74190456a9c5806f9ba06cf66/179_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:40,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,917 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742966_2142, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_179.json 2018-07-21T05:32:40,923 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_179.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:40,942 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,942 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742967_2143, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b428a65682b461ba56c9a05a3a4e12f/180_descriptor.json 2018-07-21T05:32:40,947 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b428a65682b461ba56c9a05a3a4e12f/180_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:40,950 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,950 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742968_2144, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/180_index.zip 2018-07-21T05:32:40,958 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b428a65682b461ba56c9a05a3a4e12f/180_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:40,962 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:40,963 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742969_2145, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_180.json 2018-07-21T05:32:40,968 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_180.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:40,991 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:40,991 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742970_2146, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4f00ea715714460b4e46ff62b6c44a8/181_descriptor.json 2018-07-21T05:32:41,006 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4f00ea715714460b4e46ff62b6c44a8/181_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:41,009 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,009 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742971_2147, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/181_index.zip 2018-07-21T05:32:41,021 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4f00ea715714460b4e46ff62b6c44a8/181_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:41,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,025 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742972_2148, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_181.json 2018-07-21T05:32:41,033 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_181.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:41,052 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,052 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742973_2149, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d073121787cc41078ff8edd69eb5f925/182_descriptor.json 2018-07-21T05:32:41,063 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d073121787cc41078ff8edd69eb5f925/182_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:41,065 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,065 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742974_2150, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/182_index.zip 2018-07-21T05:32:41,082 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d073121787cc41078ff8edd69eb5f925/182_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:41,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,102 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742975_2151, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_182.json 2018-07-21T05:32:41,111 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_182.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:41,139 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,139 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742976_2152, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4a174658ff743e89fad9d30fe3e7eec/183_descriptor.json 2018-07-21T05:32:41,168 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4a174658ff743e89fad9d30fe3e7eec/183_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:41,171 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,171 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742977_2153, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/183_index.zip 2018-07-21T05:32:41,177 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4a174658ff743e89fad9d30fe3e7eec/183_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:41,184 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,184 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742978_2154, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_183.json 2018-07-21T05:32:41,199 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_183.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:41,226 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,226 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742979_2155, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/556b3cb2cfbd400ea1784c67851652e2/184_descriptor.json 2018-07-21T05:32:41,246 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/556b3cb2cfbd400ea1784c67851652e2/184_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,249 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,249 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:41,250 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,250 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742980_2156, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/184_index.zip 2018-07-21T05:32:41,276 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/556b3cb2cfbd400ea1784c67851652e2/184_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:41,289 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,289 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742981_2157, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_184.json 2018-07-21T05:32:41,297 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_184.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:41,321 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,321 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742982_2158, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/824556f71d3f4478a4b6168a9c6cbe0b/185_descriptor.json 2018-07-21T05:32:41,329 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/824556f71d3f4478a4b6168a9c6cbe0b/185_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,331 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,331 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742983_2159, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/185_index.zip 2018-07-21T05:32:41,338 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/824556f71d3f4478a4b6168a9c6cbe0b/185_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:41,342 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,342 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742984_2160, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_185.json 2018-07-21T05:32:41,347 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_185.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:41,381 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,381 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742985_2161, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ff9b0f7b66e4faea26c5f7948d5b503/186_descriptor.json 2018-07-21T05:32:41,394 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ff9b0f7b66e4faea26c5f7948d5b503/186_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:41,396 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,397 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742986_2162, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/186_index.zip 2018-07-21T05:32:41,402 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ff9b0f7b66e4faea26c5f7948d5b503/186_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:41,406 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,406 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742987_2163, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_186.json 2018-07-21T05:32:41,414 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_186.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:41,440 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,440 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742988_2164, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/895e491f9de8402f913cb2c46605b5f7/187_descriptor.json 2018-07-21T05:32:41,446 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/895e491f9de8402f913cb2c46605b5f7/187_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:41,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:41,448 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742989_2165, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/187_index.zip 2018-07-21T05:32:41,454 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/895e491f9de8402f913cb2c46605b5f7/187_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:41,465 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,465 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742990_2166, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_187.json
2018-07-21T05:32:41,471 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_187.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:41,493 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,493 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742991_2167, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9dca8848b01541af86f43b04dcb1d567/188_descriptor.json
2018-07-21T05:32:41,508 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9dca8848b01541af86f43b04dcb1d567/188_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:41,510 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,510 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742992_2168, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/188_index.zip
2018-07-21T05:32:41,516 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9dca8848b01541af86f43b04dcb1d567/188_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:41,522 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,522 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742993_2169, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_188.json
2018-07-21T05:32:41,528 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_188.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:41,558 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,558 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742994_2170, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8bb0a8878ed49d78defc19b4e62f0c5/189_descriptor.json
2018-07-21T05:32:41,564 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8bb0a8878ed49d78defc19b4e62f0c5/189_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:41,566 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,566 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742995_2171, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/189_index.zip
2018-07-21T05:32:41,571 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8bb0a8878ed49d78defc19b4e62f0c5/189_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,575 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,575 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742996_2172, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_189.json
2018-07-21T05:32:41,579 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_189.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,606 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,606 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,606 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,606 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,607 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,607 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,607 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:41,607 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,607 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742997_2173, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49597fc11b4c4f688c42455460ab140e/190_descriptor.json
2018-07-21T05:32:41,611 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49597fc11b4c4f688c42455460ab140e/190_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,613 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,614 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742998_2174, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/190_index.zip
2018-07-21T05:32:41,618 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49597fc11b4c4f688c42455460ab140e/190_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:41,626 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,626 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073742999_2175, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_190.json
2018-07-21T05:32:41,634 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_190.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,652 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,652 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743000_2176, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/99a431f0a3b54b5c9d65f812655fdb30/191_descriptor.json
2018-07-21T05:32:41,656 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/99a431f0a3b54b5c9d65f812655fdb30/191_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,658 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,658 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743001_2177, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/191_index.zip
2018-07-21T05:32:41,661 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/99a431f0a3b54b5c9d65f812655fdb30/191_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:41,665 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,665 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743002_2178, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_191.json
2018-07-21T05:32:41,668 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_191.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,685 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,685 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743003_2179, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b5b75350f99441c89445e1e2344b9da6/192_descriptor.json
2018-07-21T05:32:41,689 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b5b75350f99441c89445e1e2344b9da6/192_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,691 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,692 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743004_2180, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/192_index.zip
2018-07-21T05:32:41,696 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b5b75350f99441c89445e1e2344b9da6/192_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,700 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,700 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743005_2181, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_192.json
2018-07-21T05:32:41,706 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_192.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,727 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,727 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,727 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,727 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,727 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,728 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,728 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,728 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:41,728 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,728 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743006_2182, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/03f5dd359ad346a39fbdd3b5fb9b0f2d/193_descriptor.json
2018-07-21T05:32:41,734 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/03f5dd359ad346a39fbdd3b5fb9b0f2d/193_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,748 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,749 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743007_2183, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/193_index.zip
2018-07-21T05:32:41,774 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/03f5dd359ad346a39fbdd3b5fb9b0f2d/193_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:41,782 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,782 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,782 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,782 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,782 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743008_2184, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_193.json
2018-07-21T05:32:41,787 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_193.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:41,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,810 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743009_2185, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bb75db5dbfac4db6a0b7b2b81c3496c5/194_descriptor.json
2018-07-21T05:32:41,817 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bb75db5dbfac4db6a0b7b2b81c3496c5/194_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:41,820 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,820 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743010_2186, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/194_index.zip
2018-07-21T05:32:41,827 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bb75db5dbfac4db6a0b7b2b81c3496c5/194_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,838 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,838 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743011_2187, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_194.json
2018-07-21T05:32:41,853 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_194.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,877 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,877 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743012_2188, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cbe96d6ceefd4ed5acac3dc42467695d/195_descriptor.json
2018-07-21T05:32:41,885 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cbe96d6ceefd4ed5acac3dc42467695d/195_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,887 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,888 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743013_2189, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/195_index.zip
2018-07-21T05:32:41,894 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cbe96d6ceefd4ed5acac3dc42467695d/195_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,902 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,902 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743014_2190, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_195.json
2018-07-21T05:32:41,907 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_195.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,928 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,928 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,929 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:41,929 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,929 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,929 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,929 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:41,929 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,929 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743015_2191, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/62814f7d17654bc8a00c41a1678883f5/196_descriptor.json
2018-07-21T05:32:41,933 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/62814f7d17654bc8a00c41a1678883f5/196_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:41,935 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:41,936 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743016_2192, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/196_index.zip
2018-07-21T05:32:41,940 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/62814f7d17654bc8a00c41a1678883f5/196_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:41,944 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,944 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743017_2193, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_196.json
2018-07-21T05:32:41,948 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_196.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:41,965 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,965 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743018_2194, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3bbfb9d5e5eb4c9ba01b1883731dca8b/197_descriptor.json
2018-07-21T05:32:41,973 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3bbfb9d5e5eb4c9ba01b1883731dca8b/197_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:41,975 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,976 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743019_2195, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/197_index.zip
2018-07-21T05:32:41,980 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3bbfb9d5e5eb4c9ba01b1883731dca8b/197_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:41,983 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:41,983 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743020_2196, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_197.json
2018-07-21T05:32:41,990 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_197.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,021 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,021 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743021_2197, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5400273fb20e434abca12e4acbf15178/198_descriptor.json
2018-07-21T05:32:42,028 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5400273fb20e434abca12e4acbf15178/198_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,031 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,031 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743022_2198, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/198_index.zip
2018-07-21T05:32:42,037 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5400273fb20e434abca12e4acbf15178/198_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,046 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,046 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743023_2199, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_198.json
2018-07-21T05:32:42,052 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_198.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,070 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,070 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743024_2200, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9eb8f4512a4949e8a192b849c8a53e1a/199_descriptor.json
2018-07-21T05:32:42,074 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9eb8f4512a4949e8a192b849c8a53e1a/199_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,076 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,076 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,076 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,076 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,076 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,077 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,077 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,077 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,077 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,077 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,077 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743025_2201, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/199_index.zip
2018-07-21T05:32:42,080 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9eb8f4512a4949e8a192b849c8a53e1a/199_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,083 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,083 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743026_2202, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_199.json
2018-07-21T05:32:42,087 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_199.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,108 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,191 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743027_2203, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d1eaf07f2ea040bca1e3b2192ad37e90/200_descriptor.json
2018-07-21T05:32:42,113 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d1eaf07f2ea040bca1e3b2192ad37e90/200_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,115 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,115 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743028_2204, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/200_index.zip
2018-07-21T05:32:42,122 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d1eaf07f2ea040bca1e3b2192ad37e90/200_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,126 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,126 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743029_2205, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_200.json
2018-07-21T05:32:42,129 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_200.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:42,150 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,151 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,151 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,151 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743030_2206, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d666af46f5724f059066070e0f70c054/201_descriptor.json
2018-07-21T05:32:42,158 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d666af46f5724f059066070e0f70c054/201_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,160 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,160 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743031_2207, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/201_index.zip
2018-07-21T05:32:42,165 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d666af46f5724f059066070e0f70c054/201_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,168 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,168 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743032_2208, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_201.json
2018-07-21T05:32:42,172 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_201.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,190 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,191 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743033_2209, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0411ea7d05684565bda596216302a1ba/202_descriptor.json
2018-07-21T05:32:42,195 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0411ea7d05684565bda596216302a1ba/202_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,197 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,197 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743034_2210, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/202_index.zip
2018-07-21T05:32:42,202 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0411ea7d05684565bda596216302a1ba/202_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,205 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743035_2211, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_202.json
2018-07-21T05:32:42,220 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_202.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:42,244 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,244 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743036_2212, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/da3f321aa6df4c5991514104708ef8dc/203_descriptor.json
2018-07-21T05:32:42,254 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/da3f321aa6df4c5991514104708ef8dc/203_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,256 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,256 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,257 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,257 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743037_2213, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/203_index.zip
2018-07-21T05:32:42,267 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/da3f321aa6df4c5991514104708ef8dc/203_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,271 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,271 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743038_2214, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_203.json
2018-07-21T05:32:42,282 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_203.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,305 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,305 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743039_2215, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7946175843ab42879875ffea0a7b4848/204_descriptor.json
2018-07-21T05:32:42,311 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7946175843ab42879875ffea0a7b4848/204_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,313 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,313 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743040_2216, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/204_index.zip
2018-07-21T05:32:42,325 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7946175843ab42879875ffea0a7b4848/204_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,329 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,330 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743041_2217, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_204.json
2018-07-21T05:32:42,334 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_204.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,362 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,362 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743042_2218, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4bc453bf0734605891e7546b40e7ca7/205_descriptor.json
2018-07-21T05:32:42,368 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4bc453bf0734605891e7546b40e7ca7/205_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:42,370 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,370 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743043_2219, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/205_index.zip
2018-07-21T05:32:42,374 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4bc453bf0734605891e7546b40e7ca7/205_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,378 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,378 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743044_2220, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_205.json
2018-07-21T05:32:42,385 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_205.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:42,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,414 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743045_2221, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4016041da49f4c1ca723ff6c680348fa/206_descriptor.json
2018-07-21T05:32:42,422 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4016041da49f4c1ca723ff6c680348fa/206_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,424 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,425 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,425 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743046_2222, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/206_index.zip
2018-07-21T05:32:42,431 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4016041da49f4c1ca723ff6c680348fa/206_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,435 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,435 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,436 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,436 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743047_2223, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_206.json
2018-07-21T05:32:42,445 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_206.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,465 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,465 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743048_2224, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/311244e5f63749b6914010626e95c4e9/207_descriptor.json
2018-07-21T05:32:42,470 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/311244e5f63749b6914010626e95c4e9/207_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,472 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743049_2225, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/207_index.zip
2018-07-21T05:32:42,482 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/311244e5f63749b6914010626e95c4e9/207_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:42,486 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,486 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743050_2226, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_207.json
2018-07-21T05:32:42,498 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_207.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,501 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:42,516 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:42,517 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,517 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743051_2227, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/11e284b35db440eea90d6e1145d15944/208_descriptor.json
2018-07-21T05:32:42,521 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/11e284b35db440eea90d6e1145d15944/208_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,523 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,523 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,524 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:42,524 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743052_2228, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/208_index.zip
2018-07-21T05:32:42,528 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/11e284b35db440eea90d6e1145d15944/208_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:42,532 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,532 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743053_2229, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_208.json
2018-07-21T05:32:42,538 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_208.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:42,563 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:42,563 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743054_2230, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dc07509eca9345d689b096b13ede2a54/209_descriptor.json
2018-07-21T05:32:42,977 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dc07509eca9345d689b096b13ede2a54/209_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,980 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:42,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,981 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743055_2231, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/209_index.zip
2018-07-21T05:32:42,987 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dc07509eca9345d689b096b13ede2a54/209_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:42,990 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:42,990 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743056_2232, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_209.json
2018-07-21T05:32:42,997 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_209.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:43,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,019 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743057_2233, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/97613429ecdd4f829baf0157b34d9d28/210_descriptor.json
2018-07-21T05:32:43,044 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/97613429ecdd4f829baf0157b34d9d28/210_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,052 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,052 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,052 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,052 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,052 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,052 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,052 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743058_2234, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/210_index.zip
2018-07-21T05:32:43,066 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/97613429ecdd4f829baf0157b34d9d28/210_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,073 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:43,075 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,075 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743059_2235, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_210.json
2018-07-21T05:32:43,084 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_210.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:43,107 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,107 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743060_2236, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0d78b610b9d9412596fe0420b67d9f24/211_descriptor.json
2018-07-21T05:32:43,121 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0d78b610b9d9412596fe0420b67d9f24/211_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:43,124 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,124 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743061_2237, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/211_index.zip
2018-07-21T05:32:43,135 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0d78b610b9d9412596fe0420b67d9f24/211_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,141 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,142 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743062_2238, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_211.json
2018-07-21T05:32:43,146 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_211.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,165 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,165 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,165 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,165 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,166 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,166 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743063_2239, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2829050ba8c4fdf80630560f91e280d/212_descriptor.json
2018-07-21T05:32:43,170 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2829050ba8c4fdf80630560f91e280d/212_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,174 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,174 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743064_2240, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/212_index.zip
2018-07-21T05:32:43,187 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c2829050ba8c4fdf80630560f91e280d/212_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,193 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,193 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743065_2241, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_212.json
2018-07-21T05:32:43,198 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_212.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:43,229 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,229 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743066_2242, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5126e77081ae48c69b73182d8c964d74/213_descriptor.json
2018-07-21T05:32:43,239 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5126e77081ae48c69b73182d8c964d74/213_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,243 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743067_2243, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/213_index.zip
2018-07-21T05:32:43,251 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5126e77081ae48c69b73182d8c964d74/213_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,259 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,259 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,259 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,259 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,259 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,259 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,260 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,260 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,260 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:43,260 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,260 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743068_2244, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_213.json
2018-07-21T05:32:43,274 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_213.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,297 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,298 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743069_2245, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/300a1b201568461a95e05cc5456ee28d/214_descriptor.json
2018-07-21T05:32:43,307 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/300a1b201568461a95e05cc5456ee28d/214_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,309 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,309 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743070_2246, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/214_index.zip
2018-07-21T05:32:43,314 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/300a1b201568461a95e05cc5456ee28d/214_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,319 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743071_2247, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_214.json
2018-07-21T05:32:43,323 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_214.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,340 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,340 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743072_2248, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e335ee1aae2a4461ae92d3ddd2618df0/215_descriptor.json
2018-07-21T05:32:43,344 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e335ee1aae2a4461ae92d3ddd2618df0/215_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,349 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,349 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743073_2249, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/215_index.zip
2018-07-21T05:32:43,354 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e335ee1aae2a4461ae92d3ddd2618df0/215_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:43,365 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,365 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743074_2250, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_215.json
2018-07-21T05:32:43,371 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_215.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,390 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,390 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743075_2251, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/add9d5c815e1497e8c2b69c4c3bac51e/216_descriptor.json
2018-07-21T05:32:43,400 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/add9d5c815e1497e8c2b69c4c3bac51e/216_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,406 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,406 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,407 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,407 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743076_2252, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/216_index.zip
2018-07-21T05:32:43,411 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/add9d5c815e1497e8c2b69c4c3bac51e/216_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:43,414 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,415 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743077_2253, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_216.json
2018-07-21T05:32:43,419 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_216.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
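Each allocate/completeFile pair in these records repeats one placement pattern per staged file: with replication 3, the NameNode first looks for a datanode outside the writer's rack, which on this single-rack test cluster always produces the paired records "Failed to find datanode (scope="" excludedScope="/default-rack")." and "No node to choose.", and it then falls back to a local-rack random pick that excludes the replicas already chosen for the block. The Java sketch below illustrates that fallback under the stated single-rack assumption; the class and method names are hypothetical and this is not Hadoop's BlockPlacementPolicyDefault.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

public class SingleRackFallbackSketch {
    // All datanodes of this mini cluster live on /default-rack.
    static final Map<String, List<String>> RACKS = Map.of(
            "/default-rack", List.of("127.0.0.1:52570", "127.0.0.1:45625",
                    "127.0.0.1:33099", "127.0.0.1:40780"));

    // Nodes under 'scope', not under 'excludedScope', not already excluded.
    static List<String> candidates(String scope, String excludedScope, Set<String> excluded) {
        List<String> out = new ArrayList<>();
        for (Map.Entry<String, List<String>> rack : RACKS.entrySet()) {
            boolean inScope = scope.isEmpty() || rack.getKey().startsWith(scope);
            boolean inExcludedScope = excludedScope != null && rack.getKey().startsWith(excludedScope);
            if (inScope && !inExcludedScope) {
                for (String n : rack.getValue()) {
                    if (!excluded.contains(n)) {
                        out.add(n);
                    }
                }
            }
        }
        return out;
    }

    static String chooseNext(Set<String> excluded, Random rnd) {
        // Remote-rack attempt: scope "" minus /default-rack is empty here,
        // which matches 'Failed to find datanode (scope="" excludedScope=
        // "/default-rack").' followed by 'No node to choose.' in the log.
        List<String> remote = candidates("", "/default-rack", excluded);
        List<String> pool = remote.isEmpty()
                ? candidates("/default-rack", null, excluded) // local-rack fallback
                : remote;
        return pool.isEmpty() ? null : pool.get(rnd.nextInt(pool.size()));
    }

    public static void main(String[] args) {
        Random rnd = new Random();
        Set<String> excluded = new HashSet<>(Set.of("127.0.0.1:52570")); // first replica
        List<String> replicas = new ArrayList<>(excluded);
        while (replicas.size() < 3) {
            String next = chooseNext(excluded, rnd);
            replicas.add(next);
            excluded.add(next);
        }
        System.out.println("replicas=" + replicas); // e.g. the triple in a BLOCK* allocate record
    }
}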
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,439 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,440 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743078_2254, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e274d6221f34e46a258cef4d5a27109/217_descriptor.json
2018-07-21T05:32:43,444 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e274d6221f34e46a258cef4d5a27109/217_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,446 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,446 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,446 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:43,446 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,447 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,447 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,447 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:43,447 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,447 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743079_2255, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/217_index.zip
2018-07-21T05:32:43,457 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e274d6221f34e46a258cef4d5a27109/217_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:43,461 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,461 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743080_2256, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_217.json
2018-07-21T05:32:43,469 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_217.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,488 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,488 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743081_2257, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/af4be554d11349f9bc4d37d2cd7be61f/218_descriptor.json
2018-07-21T05:32:43,492 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/af4be554d11349f9bc4d37d2cd7be61f/218_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,494 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,495 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743082_2258, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/218_index.zip
2018-07-21T05:32:43,499 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/af4be554d11349f9bc4d37d2cd7be61f/218_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:43,508 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,508 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743083_2259, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_218.json
2018-07-21T05:32:43,514 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_218.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,534 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743084_2260, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7535c6e04ec4fd9885fb7e494ca3b71/219_descriptor.json
2018-07-21T05:32:43,550 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7535c6e04ec4fd9885fb7e494ca3b71/219_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,564 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,564 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743085_2261, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/219_index.zip
2018-07-21T05:32:43,568 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d7535c6e04ec4fd9885fb7e494ca3b71/219_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,572 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,572 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743086_2262, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_219.json
2018-07-21T05:32:43,577 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_219.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,603 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,603 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:43,604 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,604 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743087_2263, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/251ccbeda59a4c6badfb25af91730b6d/220_descriptor.json
2018-07-21T05:32:43,610 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/251ccbeda59a4c6badfb25af91730b6d/220_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,612 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,612 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:43,613 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,613 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743088_2264, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/220_index.zip
2018-07-21T05:32:43,617 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/251ccbeda59a4c6badfb25af91730b6d/220_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:43,620 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:43,620 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743089_2265, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_220.json
2018-07-21T05:32:43,624 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_220.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:43,643 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,643 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743090_2266, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a2f1a8bb3a454a71bbc8efe06645f107/221_descriptor.json 2018-07-21T05:32:43,648 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a2f1a8bb3a454a71bbc8efe06645f107/221_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:43,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,650 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743091_2267, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/221_index.zip 2018-07-21T05:32:43,657 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a2f1a8bb3a454a71bbc8efe06645f107/221_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:43,663 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,663 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743092_2268, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_221.json 2018-07-21T05:32:43,673 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_221.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,691 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:43,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,692 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743093_2269, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e42836cf9f3c4673bd7b1d73b9074ab7/222_descriptor.json 2018-07-21T05:32:43,697 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e42836cf9f3c4673bd7b1d73b9074ab7/222_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:43,699 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:43,699 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743094_2270, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/222_index.zip 2018-07-21T05:32:43,714 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e42836cf9f3c4673bd7b1d73b9074ab7/222_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:43,718 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,718 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743095_2271, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_222.json 2018-07-21T05:32:43,749 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_222.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:43,771 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,771 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743096_2272, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfc3118734ce43beabdb366597752af4/223_descriptor.json 2018-07-21T05:32:43,776 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfc3118734ce43beabdb366597752af4/223_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:43,778 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,778 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743097_2273, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/223_index.zip 2018-07-21T05:32:43,785 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfc3118734ce43beabdb366597752af4/223_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:43,789 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:43,789 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743098_2274, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_223.json 2018-07-21T05:32:43,797 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_223.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:43,817 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,817 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743099_2275, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e0fe9dcf74d34d2a87d09ab1190d0a43/224_descriptor.json 2018-07-21T05:32:43,823 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e0fe9dcf74d34d2a87d09ab1190d0a43/224_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:43,825 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,825 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743100_2276, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/224_index.zip 2018-07-21T05:32:43,830 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e0fe9dcf74d34d2a87d09ab1190d0a43/224_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:43,843 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:43,843 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743101_2277, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_224.json 2018-07-21T05:32:43,850 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_224.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:43,875 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,875 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743102_2278, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a2d29953bf7b4ca692f4aef18a93b9ae/225_descriptor.json 2018-07-21T05:32:43,880 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a2d29953bf7b4ca692f4aef18a93b9ae/225_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,884 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:43,884 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,884 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:43,884 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,884 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,885 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,885 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:43,885 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,885 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743103_2279, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/225_index.zip 2018-07-21T05:32:43,892 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a2d29953bf7b4ca692f4aef18a93b9ae/225_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:43,913 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:43,913 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743104_2280, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_225.json 2018-07-21T05:32:43,919 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_225.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:43,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:43,941 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743105_2281, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ffbdf16a8e747f39fca35515ae2f617/226_descriptor.json 2018-07-21T05:32:43,946 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ffbdf16a8e747f39fca35515ae2f617/226_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:43,948 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:43,948 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743106_2282, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/226_index.zip 2018-07-21T05:32:43,953 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ffbdf16a8e747f39fca35515ae2f617/226_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:43,961 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,961 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,961 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:43,961 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:43,961 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:43,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:43,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:43,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:43,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:43,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:43,962 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743107_2283, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_226.json 2018-07-21T05:32:43,970 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_226.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:44,004 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:44,004 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743108_2284, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b894d36adbbd489a9167f6adf8d9bf3f/227_descriptor.json 2018-07-21T05:32:44,011 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b894d36adbbd489a9167f6adf8d9bf3f/227_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
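The descriptor files landing in segmentsDescriptorDir above follow a recognizable naming pattern: datasource, segment interval start and end, version timestamp, and partition number, joined with underscores and with colons stripped (HDFS rejects ':' in path names). A small sketch that reproduces the name seen in the log; the helper descriptorName is hypothetical, not the actual Hive/Druid code.

// Hypothetical helper reconstructing the observed descriptor filename:
// <datasource>_<intervalStart>_<intervalEnd>_<version>_<partitionNum>.json
public class DescriptorNameSketch {
  static String descriptorName(String dataSource, String intervalStart,
      String intervalEnd, String version, int partitionNum) {
    // Colons are stripped from the timestamps, exactly as in the log,
    // since ':' is not a legal character in HDFS path components.
    return String.join("_", dataSource,
        intervalStart.replace(":", ""),
        intervalEnd.replace(":", ""),
        version.replace(":", ""),
        Integer.toString(partitionNum)) + ".json";
  }

  public static void main(String[] args) {
    System.out.println(descriptorName("default.druid_max_size_partition",
        "1970-01-01T00:00:00.000Z", "1970-01-01T01:00:00.000Z",
        "2018-07-21T05:31:59.547-07:00", 226));
    // -> default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_226.json
  }
}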
2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:44,013 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:44,013 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743109_2285, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/227_index.zip 2018-07-21T05:32:44,420 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b894d36adbbd489a9167f6adf8d9bf3f/227_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:44,424 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:44,424 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743110_2286, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_227.json 2018-07-21T05:32:44,429 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_227.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:44,452 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:44,452 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743111_2287, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6cfb936fa7c84a52b0ff625e1b468aca/228_descriptor.json 2018-07-21T05:32:44,456 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6cfb936fa7c84a52b0ff625e1b468aca/228_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:44,458 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:44,458 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743112_2288, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/228_index.zip 2018-07-21T05:32:44,461 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6cfb936fa7c84a52b0ff625e1b468aca/228_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:44,465 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:44,465 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743113_2289, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_228.json 2018-07-21T05:32:44,469 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_228.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:44,487 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:44,487 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743114_2290, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5f5efd7b82ec4476b7fec4441e8d9409/229_descriptor.json 2018-07-21T05:32:44,491 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5f5efd7b82ec4476b7fec4441e8d9409/229_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:44,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:44,493 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743115_2291, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/229_index.zip 2018-07-21T05:32:44,497 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5f5efd7b82ec4476b7fec4441e8d9409/229_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:44,500 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,500 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:44,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:44,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:44,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:44,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:44,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,501 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,501 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743116_2292, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_229.json
2018-07-21T05:32:44,504 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_229.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,525 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,525 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,526 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,526 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743117_2293, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2bff0deced1043f0add9611ebfe936ad/230_descriptor.json
2018-07-21T05:32:44,530 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2bff0deced1043f0add9611ebfe936ad/230_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,533 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,533 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743118_2294, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/230_index.zip
2018-07-21T05:32:44,537 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2bff0deced1043f0add9611ebfe936ad/230_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,540 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,540 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,541 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,541 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743119_2295, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_230.json
2018-07-21T05:32:44,545 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_230.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,563 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,563 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,564 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,564 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743120_2296, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0806a999414c4a17a0b8f14531be92e6/231_descriptor.json
2018-07-21T05:32:44,568 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0806a999414c4a17a0b8f14531be92e6/231_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,570 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,571 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,571 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743121_2297, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/231_index.zip
2018-07-21T05:32:44,575 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0806a999414c4a17a0b8f14531be92e6/231_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:44,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,578 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743122_2298, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_231.json
2018-07-21T05:32:44,582 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_231.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,601 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,601 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743123_2299, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4977a71c0f5463e8bf92fb16c2ae244/232_descriptor.json
2018-07-21T05:32:44,605 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4977a71c0f5463e8bf92fb16c2ae244/232_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,607 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,608 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743124_2300, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/232_index.zip
2018-07-21T05:32:44,611 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d4977a71c0f5463e8bf92fb16c2ae244/232_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,615 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,615 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743125_2301, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_232.json
2018-07-21T05:32:44,619 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_232.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,637 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,638 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,638 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743126_2302, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33239c43eb1e4fbeac9430d764fa9e31/233_descriptor.json
2018-07-21T05:32:44,642 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33239c43eb1e4fbeac9430d764fa9e31/233_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,644 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,644 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,645 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,645 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,645 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,645 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,645 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,645 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,645 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743127_2303, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/233_index.zip
2018-07-21T05:32:44,649 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/33239c43eb1e4fbeac9430d764fa9e31/233_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,652 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743128_2304, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_233.json
2018-07-21T05:32:44,656 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_233.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,674 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,675 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743129_2305, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e3a024bb012046a598e06f8205cfcb67/234_descriptor.json
2018-07-21T05:32:44,679 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e3a024bb012046a598e06f8205cfcb67/234_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,681 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,681 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743130_2306, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/234_index.zip
2018-07-21T05:32:44,686 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e3a024bb012046a598e06f8205cfcb67/234_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,689 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,689 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743131_2307, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_234.json
2018-07-21T05:32:44,693 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_234.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,711 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,712 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743132_2308, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cd3045789ef64b198d9e6c4f361373a3/235_descriptor.json
2018-07-21T05:32:44,716 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cd3045789ef64b198d9e6c4f361373a3/235_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,718 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,718 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743133_2309, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/235_index.zip
2018-07-21T05:32:44,722 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cd3045789ef64b198d9e6c4f361373a3/235_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,726 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,726 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,726 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,726 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,726 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743134_2310, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_235.json
2018-07-21T05:32:44,730 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_235.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,752 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,752 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743135_2311, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a26084b2244644328a10d3703f2bdc1e/236_descriptor.json
2018-07-21T05:32:44,757 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a26084b2244644328a10d3703f2bdc1e/236_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,759 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,759 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743136_2312, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/236_index.zip
2018-07-21T05:32:44,764 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a26084b2244644328a10d3703f2bdc1e/236_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:44,767 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,767 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743137_2313, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_236.json
2018-07-21T05:32:44,771 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_236.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:44,789 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,789 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743138_2314, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7bf2ce545ab54da5ba07a04134f36780/237_descriptor.json
2018-07-21T05:32:44,793 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7bf2ce545ab54da5ba07a04134f36780/237_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,795 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,795 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743139_2315, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/237_index.zip
2018-07-21T05:32:44,799 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7bf2ce545ab54da5ba07a04134f36780/237_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,802 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:44,803 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,803 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743140_2316, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_237.json
2018-07-21T05:32:44,807 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_237.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:44,825 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,825 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743141_2317, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e566ef26ab7446c8bd54ccb960944cb/238_descriptor.json
2018-07-21T05:32:44,829 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e566ef26ab7446c8bd54ccb960944cb/238_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:44,831 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,831 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743142_2318, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/238_index.zip
2018-07-21T05:32:44,835 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e566ef26ab7446c8bd54ccb960944cb/238_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,838 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,838 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743143_2319, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_238.json
2018-07-21T05:32:44,842 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_238.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:44,860 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,860 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743144_2320, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1eb378141d64258be3bc38faa8b1013/239_descriptor.json
2018-07-21T05:32:44,864 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1eb378141d64258be3bc38faa8b1013/239_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:44,866 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,866 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743145_2321, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/239_index.zip
2018-07-21T05:32:44,870 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1eb378141d64258be3bc38faa8b1013/239_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,873 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,874 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743146_2322, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_239.json
2018-07-21T05:32:44,878 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_239.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,898 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,898 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,899 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,899 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,899 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,899 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,899 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,899 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,899 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743147_2323, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ab1a48a19a345a5b2916a513c710f49/240_descriptor.json
2018-07-21T05:32:44,906 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ab1a48a19a345a5b2916a513c710f49/240_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:44,908 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,908 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743148_2324, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/240_index.zip
2018-07-21T05:32:44,912 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ab1a48a19a345a5b2916a513c710f49/240_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:44,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,916 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743149_2325, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_240.json
2018-07-21T05:32:44,926 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_240.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,946 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,946 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743150_2326, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f4ed49afeeef425680a515015f9fec8f/241_descriptor.json
2018-07-21T05:32:44,950 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f4ed49afeeef425680a515015f9fec8f/241_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,952 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,952 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743151_2327, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/241_index.zip
2018-07-21T05:32:44,957 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f4ed49afeeef425680a515015f9fec8f/241_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:44,960 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,960 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743152_2328, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_241.json
2018-07-21T05:32:44,964 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_241.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,982 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,982 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743153_2329, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/478651f6da754f9a95085e22563c49eb/242_descriptor.json
2018-07-21T05:32:44,986 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/478651f6da754f9a95085e22563c49eb/242_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:44,988 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:44,988 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743154_2330, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/242_index.zip
2018-07-21T05:32:44,992 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/478651f6da754f9a95085e22563c49eb/242_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:44,994 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,994 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,994 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:44,995 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:44,995 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:44,995 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:44,995 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:44,995 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:44,995 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743155_2331, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_242.json
2018-07-21T05:32:44,998 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_242.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,019 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,019 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
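Each "BLOCK* allocate ... replicas=a, b, c" entry above is the end product of up to three such draws: the second draw excludes the first chosen replica, the third excludes the first two, which is exactly why the excludeNodes lists grow from one entry to two within each allocation. A hedged sketch of that outer loop, reusing the hypothetical RackSketch from earlier (this is an inference from the log's structure, not Hadoop's BlockPlacementPolicy code):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ReplicaLoopSketch {
    // Pick three pipeline targets, excluding nodes already chosen for this block.
    static List<String> chooseThreeReplicas(RackSketch rack, String firstReplica) {
        List<String> replicas = new ArrayList<>();
        Set<String> exclude = new HashSet<>();
        replicas.add(firstReplica);      // e.g. the datanode hosting the writer
        exclude.add(firstReplica);
        while (replicas.size() < 3) {
            String next = rack.chooseRandom(exclude);  // excludeNodes grows each round
            if (next == null) {
                break;                   // fewer than three usable nodes in the rack
            }
            replicas.add(next);
            exclude.add(next);
        }
        return replicas;                 // logged as "replicas=<r1>, <r2>, <r3>"
    }
}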
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,020 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,020 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743156_2332, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fa0d621d50de44fd8c9d04eef59c4fe3/243_descriptor.json
2018-07-21T05:32:45,024 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fa0d621d50de44fd8c9d04eef59c4fe3/243_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,026 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,026 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,026 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,026 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,026 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743157_2333, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/243_index.zip
2018-07-21T05:32:45,029 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fa0d621d50de44fd8c9d04eef59c4fe3/243_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,032 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,032 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,032 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,032 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,032 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,033 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,033 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743158_2334, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_243.json
2018-07-21T05:32:45,036 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_243.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,053 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,053 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743159_2335, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ca0da0220d94ae9bd4669c3ac635997/244_descriptor.json
2018-07-21T05:32:45,057 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ca0da0220d94ae9bd4669c3ac635997/244_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:45,059 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,059 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743160_2336, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/244_index.zip
2018-07-21T05:32:45,064 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ca0da0220d94ae9bd4669c3ac635997/244_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,072 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,072 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743161_2337, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_244.json
2018-07-21T05:32:45,076 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_244.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,094 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,094 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743162_2338, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f4761f918af14866a1213f18be12ca01/245_descriptor.json
2018-07-21T05:32:45,099 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f4761f918af14866a1213f18be12ca01/245_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,100 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,100 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743163_2339, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/245_index.zip
2018-07-21T05:32:45,104 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f4761f918af14866a1213f18be12ca01/245_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,107 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,107 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743164_2340, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_245.json
2018-07-21T05:32:45,111 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_245.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:45,128 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,128 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743165_2341, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/689942477ba943a5a119ffdb5d0df750/246_descriptor.json 2018-07-21T05:32:45,132 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/689942477ba943a5a119ffdb5d0df750/246_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:45,134 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,134 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743166_2342, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/246_index.zip 2018-07-21T05:32:45,138 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/689942477ba943a5a119ffdb5d0df750/246_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,141 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,141 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743167_2343, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_246.json 2018-07-21T05:32:45,147 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_246.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,165 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,165 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743168_2344, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8a0143fda011481caceaa320b5272d37/247_descriptor.json 2018-07-21T05:32:45,169 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8a0143fda011481caceaa320b5272d37/247_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,173 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,173 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743169_2345, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/247_index.zip 2018-07-21T05:32:45,178 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8a0143fda011481caceaa320b5272d37/247_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:45,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,182 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743170_2346, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_247.json 2018-07-21T05:32:45,186 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_247.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:45,209 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,209 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743171_2347, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41149973415145c0ba1c3c4cf965040e/248_descriptor.json 2018-07-21T05:32:45,214 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41149973415145c0ba1c3c4cf965040e/248_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:45,218 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,218 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743172_2348, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/248_index.zip 2018-07-21T05:32:45,222 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41149973415145c0ba1c3c4cf965040e/248_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,225 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,225 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743173_2349, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_248.json 2018-07-21T05:32:45,229 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_248.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:45,247 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,247 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743174_2350, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/272de78405e049c8b1c7f822079f27e2/249_descriptor.json 2018-07-21T05:32:45,251 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/272de78405e049c8b1c7f822079f27e2/249_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,253 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,253 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743175_2351, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/249_index.zip 2018-07-21T05:32:45,258 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/272de78405e049c8b1c7f822079f27e2/249_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:45,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,262 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743176_2352, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_249.json 2018-07-21T05:32:45,266 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_249.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
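The DEBUG entries above trace the same placement cycle for every block: with all four datanodes registered under /default-rack, the attempt to place a replica on a different rack fails first ("Failed to find datanode (scope=\"\" excludedScope=\"/default-rack\")." followed by "No node to choose."), and the placement then falls back to a random in-rack pick that skips any node already holding a replica of the block. A minimal Java sketch of that fallback loop, for illustration only (this is not Hadoop's NetworkTopology code; the node list and the pre-placed first replica are taken from the entries above):

    import java.util.*;

    // Illustrative sketch of the selection loop traced by the
    // net.NetworkTopology DEBUG entries; NOT Hadoop's implementation.
    public class ChooseRandomSketch {
        private static final Random RAND = new Random();

        // One in-rack pick, skipping nodes that already hold a replica.
        static String chooseRandom(List<String> rackNodes, Set<String> excluded) {
            long selectable = rackNodes.stream()
                    .filter(n -> !excluded.contains(n)).count();
            System.out.println("Choosing random from " + selectable
                    + " available nodes, excludeNodes=" + excluded);
            if (selectable == 0) {
                System.out.println("No node to choose.");
                return null;
            }
            while (true) {
                String node = rackNodes.get(RAND.nextInt(rackNodes.size()));
                if (excluded.contains(node)) {
                    // The draw is over every leaf in scope, so an excluded node
                    // can come up repeatedly before a free one is returned.
                    System.out.println("Node " + node + " is excluded, continuing.");
                    continue;
                }
                System.out.println("chooseRandom returning " + node);
                return node;
            }
        }

        public static void main(String[] args) {
            // The four datanodes visible in this log, all on /default-rack.
            List<String> rack = Arrays.asList("127.0.0.1:33099", "127.0.0.1:40780",
                    "127.0.0.1:45625", "127.0.0.1:52570");
            // Assumption: the writer-local first replica was placed before this
            // loop runs, which is why excludeNodes starts out non-empty above.
            Set<String> excluded =
                    new LinkedHashSet<>(Arrays.asList("127.0.0.1:40780"));
            List<String> replicas = new ArrayList<>(excluded);
            while (replicas.size() < 3) {            // replication factor 3
                String pick = chooseRandom(rack, excluded);
                if (pick == null) break;
                replicas.add(pick);
                excluded.add(pick);                  // chosen replicas join excludeNodes
            }
            System.out.println("replicas=" + replicas);
        }
    }

Because the random draw covers all leaves in scope rather than only the filtered set, the same excluded node can be probed more than once per pick, which is exactly the repeated "Node ... is excluded, continuing." entries in the trace above.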
2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:45,286 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,286 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743177_2353, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e7cc373da0854b04be096de9022976c4/250_descriptor.json 2018-07-21T05:32:45,291 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e7cc373da0854b04be096de9022976c4/250_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:45,293 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,293 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743178_2354, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/250_index.zip 2018-07-21T05:32:45,297 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e7cc373da0854b04be096de9022976c4/250_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,300 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,300 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743179_2355, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_250.json 2018-07-21T05:32:45,305 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_250.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:45,329 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,329 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743180_2356, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/745565cc6073419996c3e4bf2f42aa8f/251_descriptor.json 2018-07-21T05:32:45,334 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/745565cc6073419996c3e4bf2f42aa8f/251_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:45,337 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,337 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743181_2357, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/251_index.zip 2018-07-21T05:32:45,341 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/745565cc6073419996c3e4bf2f42aa8f/251_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,345 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,345 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,345 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743182_2358, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_251.json 2018-07-21T05:32:45,349 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_251.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,368 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:45,369 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,369 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743183_2359, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1017cad13a1467ab9af12b7d8d4c5a5/252_descriptor.json 2018-07-21T05:32:45,373 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1017cad13a1467ab9af12b7d8d4c5a5/252_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:45,380 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,380 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743184_2360, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/252_index.zip 2018-07-21T05:32:45,388 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1017cad13a1467ab9af12b7d8d4c5a5/252_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:45,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,394 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743185_2361, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_252.json 2018-07-21T05:32:45,399 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_252.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:45,417 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,418 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743186_2362, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aa15a115edb64db790b81c1ec84a9c8b/253_descriptor.json 2018-07-21T05:32:45,422 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aa15a115edb64db790b81c1ec84a9c8b/253_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:45,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,424 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743187_2363, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/253_index.zip 2018-07-21T05:32:45,428 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/aa15a115edb64db790b81c1ec84a9c8b/253_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,431 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,431 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,431 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:45,432 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,432 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743188_2364, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_253.json 2018-07-21T05:32:45,436 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_253.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:45,453 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,453 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743189_2365, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bb533a3a40364bcba32352ca01961a51/254_descriptor.json 2018-07-21T05:32:45,457 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bb533a3a40364bcba32352ca01961a51/254_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,459 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,459 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743190_2366, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/254_index.zip 2018-07-21T05:32:45,471 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bb533a3a40364bcba32352ca01961a51/254_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,474 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,474 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743191_2367, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_254.json
2018-07-21T05:32:45,478 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_254.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,496 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,496 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743192_2368, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1290d3143fc40a79441c371aa64d7d1/255_descriptor.json
2018-07-21T05:32:45,500 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1290d3143fc40a79441c371aa64d7d1/255_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,502 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,502 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743193_2369, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/255_index.zip
2018-07-21T05:32:45,507 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a1290d3143fc40a79441c371aa64d7d1/255_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,510 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,510 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743194_2370, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_255.json
2018-07-21T05:32:45,514 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_255.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,526 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,531 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,531 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743195_2371, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38cb1f7d13764aa38f695b2b0c52b07c/256_descriptor.json
2018-07-21T05:32:45,536 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38cb1f7d13764aa38f695b2b0c52b07c/256_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,538 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,538 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743196_2372, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/256_index.zip
2018-07-21T05:32:45,542 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38cb1f7d13764aa38f695b2b0c52b07c/256_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,545 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743197_2373, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_256.json
2018-07-21T05:32:45,556 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_256.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:45,574 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,574 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743198_2374, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c81b0716eeb4af690afdb093f70477b/257_descriptor.json
2018-07-21T05:32:45,579 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c81b0716eeb4af690afdb093f70477b/257_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,580 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,580 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,580 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,580 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,580 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,580 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,581 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:45,581 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,581 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743199_2375, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/257_index.zip
2018-07-21T05:32:45,584 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3c81b0716eeb4af690afdb093f70477b/257_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,588 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743200_2376, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_257.json
2018-07-21T05:32:45,594 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_257.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,614 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,615 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,615 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743201_2377, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6dc0c3d44702454ea77ef834b0080d4d/258_descriptor.json
2018-07-21T05:32:45,619 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6dc0c3d44702454ea77ef834b0080d4d/258_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:45,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,622 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743202_2378, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/258_index.zip
2018-07-21T05:32:45,634 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6dc0c3d44702454ea77ef834b0080d4d/258_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,638 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,638 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743203_2379, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_258.json
2018-07-21T05:32:45,643 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_258.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,670 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,670 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743204_2380, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8e4fbf7f0b714255a64a404f2d15b537/259_descriptor.json
2018-07-21T05:32:45,675 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8e4fbf7f0b714255a64a404f2d15b537/259_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,677 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,677 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743205_2381, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/259_index.zip
2018-07-21T05:32:45,681 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8e4fbf7f0b714255a64a404f2d15b537/259_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,684 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,684 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,684 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,684 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,685 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,685 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,685 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,685 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,685 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,685 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743206_2382, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_259.json
2018-07-21T05:32:45,690 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_259.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,707 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,707 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743207_2383, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ba72c057c8f34886bb2f76e2c3800dbc/260_descriptor.json
2018-07-21T05:32:45,711 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ba72c057c8f34886bb2f76e2c3800dbc/260_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,713 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,714 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743208_2384, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/260_index.zip
2018-07-21T05:32:45,718 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ba72c057c8f34886bb2f76e2c3800dbc/260_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:45,721 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,721 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743209_2385, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_260.json
2018-07-21T05:32:45,725 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_260.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,742 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,742 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743210_2386, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1c98c4edf49c4b4e9e5a9732b47f7741/261_descriptor.json
2018-07-21T05:32:45,747 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1c98c4edf49c4b4e9e5a9732b47f7741/261_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,749 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,749 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743211_2387, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/261_index.zip
2018-07-21T05:32:45,754 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1c98c4edf49c4b4e9e5a9732b47f7741/261_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,757 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,757 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743212_2388, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_261.json
2018-07-21T05:32:45,761 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_261.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:45,780 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,781 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743213_2389, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7cc103b3e7ff4ad1a3447a57f4b186b3/262_descriptor.json
2018-07-21T05:32:45,786 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7cc103b3e7ff4ad1a3447a57f4b186b3/262_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:45,788 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,788 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743214_2390, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/262_index.zip
2018-07-21T05:32:45,794 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7cc103b3e7ff4ad1a3447a57f4b186b3/262_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,797 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,797 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,797 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:45,797 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,797 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,798 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,798 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:45,798 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,798 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:45,798 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,798 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743215_2391, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_262.json
2018-07-21T05:32:45,802 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_262.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,819 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,819 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743216_2392, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de10b6aa358e41d0a8f92fe7f691903b/263_descriptor.json 2018-07-21T05:32:45,824 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de10b6aa358e41d0a8f92fe7f691903b/263_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,825 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,825 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:45,826 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,826 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743217_2393, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/263_index.zip 2018-07-21T05:32:45,829 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de10b6aa358e41d0a8f92fe7f691903b/263_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,833 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,833 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743218_2394, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_263.json 2018-07-21T05:32:45,837 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_263.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:45,854 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,854 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743219_2395, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2664cfa3dbaf4e19942e1d97b3acb045/264_descriptor.json 2018-07-21T05:32:45,858 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2664cfa3dbaf4e19942e1d97b3acb045/264_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:45,866 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,866 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743220_2396, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/264_index.zip 2018-07-21T05:32:45,870 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2664cfa3dbaf4e19942e1d97b3acb045/264_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:45,873 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:45,874 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743221_2397, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_264.json 2018-07-21T05:32:45,878 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_264.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:45,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:45,896 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743222_2398, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e13bcf0c37e464b93a4f848766ba25b/265_descriptor.json 2018-07-21T05:32:45,900 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e13bcf0c37e464b93a4f848766ba25b/265_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:45,902 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:45,902 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743223_2399, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/265_index.zip 2018-07-21T05:32:45,906 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0e13bcf0c37e464b93a4f848766ba25b/265_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
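Every allocation in this stretch opens with 'Failed to find datanode (scope="" excludedScope="/default-rack")' followed by 'No node to choose.': the rack-aware placement first looks for a node outside the writer's rack, and since all four datanodes of this test cluster report /default-rack, that search is always empty and the policy falls back to an in-rack pick. A short Java sketch of that two-phase lookup, assuming a fixed node-to-rack map (illustrative names, not Hadoop's placement policy classes):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

public class ScopeFallbackSketch {

    // Every datanode in this single-rack test cluster maps to /default-rack.
    static final Map<String, String> NODE_TO_RACK = new LinkedHashMap<>();
    static {
        for (String n : new String[] {
                "127.0.0.1:52570", "127.0.0.1:40780", "127.0.0.1:33099", "127.0.0.1:45625"}) {
            NODE_TO_RACK.put(n, "/default-rack");
        }
    }

    // Nodes under 'scope' ("" means the whole cluster) minus those under 'excludedScope'.
    static List<String> nodesIn(String scope, String excludedScope) {
        List<String> out = new ArrayList<>();
        for (Map.Entry<String, String> e : NODE_TO_RACK.entrySet()) {
            boolean inScope = scope.isEmpty() || e.getValue().equals(scope);
            boolean excluded = excludedScope != null && e.getValue().equals(excludedScope);
            if (inScope && !excluded) {
                out.add(e.getKey());
            }
        }
        return out;
    }

    public static void main(String[] args) {
        // Phase 1: try to place a replica off-rack, as a rack-aware policy prefers.
        List<String> offRack = nodesIn("", "/default-rack");
        if (offRack.isEmpty()) {
            System.out.println("Failed to find datanode (scope=\"\" excludedScope=\"/default-rack\").");
            System.out.println("No node to choose.");
        }
        // Phase 2: fall back to the local rack, which always succeeds here.
        List<String> inRack = nodesIn("/default-rack", null);
        System.out.println("Choosing random from " + inRack.size()
                + " available nodes on node /default-rack");
        System.out.println("chooseRandom returning " + inRack.get(new Random().nextInt(inRack.size())));
    }
}

On a multi-rack cluster phase 1 would succeed and the fallback lines would not appear; their constant repetition here is expected single-rack MiniDFSCluster behavior, not an error.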
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,909 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743224_2400, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_265.json
2018-07-21T05:32:45,913 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_265.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:45,936 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,936 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743225_2401, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a45f3f3f7d124f8d9581e2acde71b5d7/266_descriptor.json
2018-07-21T05:32:45,940 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a45f3f3f7d124f8d9581e2acde71b5d7/266_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:45,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,942 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743226_2402, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/266_index.zip
2018-07-21T05:32:45,947 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a45f3f3f7d124f8d9581e2acde71b5d7/266_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:45,950 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:45,950 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743227_2403, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_266.json
2018-07-21T05:32:45,954 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_266.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:45,971 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,971 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743228_2404, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a46ca56a0ab346caaf720969ef3ec078/267_descriptor.json
2018-07-21T05:32:45,975 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a46ca56a0ab346caaf720969ef3ec078/267_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:45,977 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,977 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743229_2405, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/267_index.zip
2018-07-21T05:32:45,981 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a46ca56a0ab346caaf720969ef3ec078/267_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:45,984 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:45,984 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743230_2406, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_267.json
2018-07-21T05:32:45,989 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_267.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:46,007 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,007 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743231_2407, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c06618fc5214f7885545e29206b52ed/268_descriptor.json
2018-07-21T05:32:46,012 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c06618fc5214f7885545e29206b52ed/268_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,014 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743232_2408, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/268_index.zip
2018-07-21T05:32:46,018 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8c06618fc5214f7885545e29206b52ed/268_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,021 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,021 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743233_2409, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_268.json
2018-07-21T05:32:46,025 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_268.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,042 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,042 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,042 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:46,042 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,043 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,043 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
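Within a single BLOCK* allocate, note how excludeNodes grows between the two chooseRandom calls: the first replica target is already fixed, and each pick is added to the exclude set before the next draw, until three distinct datanodes form the pipeline reported on the allocate line. A compact, standalone sketch of that accumulation (the random seed node below stands in for the writer-local first choice a real placement policy makes):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

public class ReplicaPipelineSketch {

    static final List<String> NODES = Arrays.asList(
            "127.0.0.1:52570", "127.0.0.1:40780", "127.0.0.1:33099", "127.0.0.1:45625");

    // Uniform pick over the nodes not yet in the pipeline.
    static String choose(Set<String> exclude, Random rng) {
        List<String> candidates = new ArrayList<>(NODES);
        candidates.removeAll(exclude);
        return candidates.get(rng.nextInt(candidates.size()));
    }

    public static void main(String[] args) {
        Random rng = new Random();
        Set<String> chosen = new LinkedHashSet<>();
        // First replica: stand-in for the writer-local choice.
        chosen.add(choose(Collections.emptySet(), rng));
        // Second and third replicas: every node already in the pipeline is
        // excluded, which is why excludeNodes grows from one entry to two
        // between the draws of a single allocation in the log.
        while (chosen.size() < 3) {
            System.out.println("Choosing random from " + (NODES.size() - chosen.size())
                    + " available nodes, excludeNodes=" + chosen);
            chosen.add(choose(chosen, rng));
        }
        System.out.println("BLOCK* allocate replicas=" + String.join(", ", chosen));
    }
}

With four datanodes and replication factor 3, every allocation leaves exactly one node out, which matches the single-element excludeNodes list that opens each cycle above.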
2018-07-21T05:32:46,043 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:46,043 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:46,043 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743234_2410, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bdd3a619534e4ad2b4dfd039deb339f4/269_descriptor.json 2018-07-21T05:32:46,047 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bdd3a619534e4ad2b4dfd039deb339f4/269_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:46,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:46,049 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743235_2411, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/269_index.zip 2018-07-21T05:32:46,053 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bdd3a619534e4ad2b4dfd039deb339f4/269_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:46,057 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:46,057 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743236_2412, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_269.json 2018-07-21T05:32:46,063 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_269.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:46,081 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:46,081 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743237_2413, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/747c16608565409a922b7ec191765a75/270_descriptor.json 2018-07-21T05:32:46,086 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/747c16608565409a922b7ec191765a75/270_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:46,088 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:46,088 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743238_2414, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/270_index.zip 2018-07-21T05:32:46,094 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/747c16608565409a922b7ec191765a75/270_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:46,097 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:46,098 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743239_2415, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_270.json 2018-07-21T05:32:46,102 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_270.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:46,119 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:46,119 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743240_2416, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/47bb67484ce64c78af94d88d47d2b6e9/271_descriptor.json 2018-07-21T05:32:46,123 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/47bb67484ce64c78af94d88d47d2b6e9/271_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:46,125 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,125 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743241_2417, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/271_index.zip
2018-07-21T05:32:46,129 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/47bb67484ce64c78af94d88d47d2b6e9/271_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:46,132 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,132 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743242_2418, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_271.json
2018-07-21T05:32:46,136 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_271.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:46,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,154 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743243_2419, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/76c50f7691f749feb41f503803a44fd7/272_descriptor.json
2018-07-21T05:32:46,158 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/76c50f7691f749feb41f503803a44fd7/272_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:46,160 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,160 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743244_2420, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/272_index.zip
2018-07-21T05:32:46,164 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/76c50f7691f749feb41f503803a44fd7/272_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,168 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:46,168 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:46,168 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:46,168 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,168 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,168 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743245_2421, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_272.json
2018-07-21T05:32:46,172 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_272.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:46,190 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,190 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743246_2422, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5fe2cc2cc12d43e5aa4175927272eaa1/273_descriptor.json
2018-07-21T05:32:46,195 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5fe2cc2cc12d43e5aa4175927272eaa1/273_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,196 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,196 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,196 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:46,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,197 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743247_2423, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/273_index.zip
2018-07-21T05:32:46,201 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5fe2cc2cc12d43e5aa4175927272eaa1/273_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:46,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,204 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743248_2424, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_273.json
2018-07-21T05:32:46,208 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_273.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:46,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,230 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743249_2425, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/180b591c8f494e8fae1f0d401bb79d05/274_descriptor.json
2018-07-21T05:32:46,234 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/180b591c8f494e8fae1f0d401bb79d05/274_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:46,236 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,236 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743250_2426, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/274_index.zip
2018-07-21T05:32:46,241 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/180b591c8f494e8fae1f0d401bb79d05/274_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:46,245 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,245 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743251_2427, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_274.json
2018-07-21T05:32:46,259 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_274.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,280 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,280 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743252_2428, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b0c076b12cf24e5f9941d6e278b6110f/275_descriptor.json
2018-07-21T05:32:46,689 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b0c076b12cf24e5f9941d6e278b6110f/275_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,691 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,691 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743253_2429, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/275_index.zip
2018-07-21T05:32:46,698 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b0c076b12cf24e5f9941d6e278b6110f/275_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,701 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,701 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743254_2430, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_275.json
2018-07-21T05:32:46,705 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_275.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,722 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,722 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743255_2431, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8045f572efe4343aeaeacc4a59ec473/276_descriptor.json
2018-07-21T05:32:46,726 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8045f572efe4343aeaeacc4a59ec473/276_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:46,728 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,728 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743256_2432, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/276_index.zip
2018-07-21T05:32:46,731 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c8045f572efe4343aeaeacc4a59ec473/276_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,734 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,734 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,734 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,734 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,734 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,734 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,735 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:46,735 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,735 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,735 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,735 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,735 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743257_2433, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_276.json
2018-07-21T05:32:46,739 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_276.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,756 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,756 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,757 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,757 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,757 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,757 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,757 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,757 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,757 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743258_2434, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8987ddeffabf4d1b8544a573353a5ae5/277_descriptor.json
2018-07-21T05:32:46,761 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8987ddeffabf4d1b8544a573353a5ae5/277_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,763 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,763 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743259_2435, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/277_index.zip
2018-07-21T05:32:46,767 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8987ddeffabf4d1b8544a573353a5ae5/277_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:46,770 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,770 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743260_2436, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_277.json
2018-07-21T05:32:46,774 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_277.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:46,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,791 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743261_2437, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fcb323aff9a949c397a3d378567c343a/278_descriptor.json
2018-07-21T05:32:46,795 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fcb323aff9a949c397a3d378567c343a/278_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:46,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,797 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743262_2438, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/278_index.zip
2018-07-21T05:32:46,801 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fcb323aff9a949c397a3d378567c343a/278_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:46,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,805 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743263_2439, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_278.json
2018-07-21T05:32:46,810 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_278.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:46,827 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:46,827 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743264_2440, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fa40cb8095834b36bdb46db2960a0aa1/279_descriptor.json
2018-07-21T05:32:46,833 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fa40cb8095834b36bdb46db2960a0aa1/279_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:46,835 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,835 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743265_2441, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/279_index.zip
2018-07-21T05:32:46,839 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fa40cb8095834b36bdb46db2960a0aa1/279_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:46,842 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:46,843 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743266_2442, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_279.json
2018-07-21T05:32:46,847 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_279.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:46,868 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:46,868 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743267_2443, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae5cb6d439dd460bb09c85e79ee01cc2/280_descriptor.json 2018-07-21T05:32:46,873 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae5cb6d439dd460bb09c85e79ee01cc2/280_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:46,875 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:46,875 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743268_2444, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/280_index.zip 2018-07-21T05:32:46,879 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ae5cb6d439dd460bb09c85e79ee01cc2/280_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:46,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:46,882 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743269_2445, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_280.json 2018-07-21T05:32:47,289 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_280.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:47,308 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,308 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743270_2446, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e6d0126affb343de8de3daa94c00b528/281_descriptor.json 2018-07-21T05:32:47,312 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e6d0126affb343de8de3daa94c00b528/281_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:47,314 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,314 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743271_2447, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/281_index.zip 2018-07-21T05:32:47,328 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e6d0126affb343de8de3daa94c00b528/281_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:47,331 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,331 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743272_2448, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_281.json 2018-07-21T05:32:47,338 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_281.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,362 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:47,363 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,363 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743273_2449, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a17943373e84284b063aa90a7e96bcf/282_descriptor.json 2018-07-21T05:32:47,367 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a17943373e84284b063aa90a7e96bcf/282_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:47,369 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,369 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743274_2450, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/282_index.zip 2018-07-21T05:32:47,373 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a17943373e84284b063aa90a7e96bcf/282_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:47,378 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,378 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743275_2451, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_282.json 2018-07-21T05:32:47,382 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_282.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:47,399 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,399 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743276_2452, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eb03e255ac0f48de9f0f14ce2cdec8da/283_descriptor.json 2018-07-21T05:32:47,805 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eb03e255ac0f48de9f0f14ce2cdec8da/283_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:47,807 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,808 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743277_2453, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/283_index.zip 2018-07-21T05:32:47,813 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eb03e255ac0f48de9f0f14ce2cdec8da/283_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
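Each BLOCK* allocate / DIR* completeFile pair above is one small Druid staging artifact (a per-segment *_descriptor.json, an *_index.zip, or a final segments-descriptor JSON) being written and closed by the reducer's DFSClient. A minimal client-side sketch of the call sequence that produces such a pair follows; the staging path is hypothetical and this is not the test harness's actual code, just the standard Hadoop FileSystem write pattern under the assumption that fs.defaultFS points at the mini-cluster's NameNode (whose RPC port, 35925 in this log, is assigned dynamically per run).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StagingWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Hypothetical path; the real run writes under druidStagingDir/.staging-*.
        Path descriptor = new Path("/tmp/druidStagingDir/example_descriptor.json");
        try (FSDataOutputStream out = fs.create(descriptor, true)) {
            // Writing the first data to the stream is what leads the NameNode
            // to log "BLOCK* allocate" and build the three-datanode pipeline...
            out.writeBytes("{\"example\": true}\n");
        } // ...and closing the stream is what leads to "DIR* completeFile".
    }
}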
2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:47,816 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,816 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743278_2454, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_283.json 2018-07-21T05:32:47,829 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_283.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:47,849 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,849 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743279_2455, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/31cb16bc2a0b44a49c04fda5a910db51/284_descriptor.json 2018-07-21T05:32:47,854 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/31cb16bc2a0b44a49c04fda5a910db51/284_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:47,856 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,856 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743280_2456, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/284_index.zip 2018-07-21T05:32:47,864 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/31cb16bc2a0b44a49c04fda5a910db51/284_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:47,869 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,869 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743281_2457, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_284.json 2018-07-21T05:32:47,873 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_284.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:47,893 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,894 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743282_2458, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f09441630a4346eb831be66a0b2144a6/285_descriptor.json 2018-07-21T05:32:47,898 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f09441630a4346eb831be66a0b2144a6/285_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,900 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,900 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,900 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:47,900 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,900 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,900 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,901 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:47,901 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,901 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,901 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743283_2459, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/285_index.zip 2018-07-21T05:32:47,905 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f09441630a4346eb831be66a0b2144a6/285_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:47,909 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,909 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743284_2460, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_285.json 2018-07-21T05:32:47,913 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_285.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,933 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,933 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743285_2461, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/16d240723e754b4db8404f4bb2e2578d/286_descriptor.json 2018-07-21T05:32:47,937 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/16d240723e754b4db8404f4bb2e2578d/286_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,939 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,939 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,939 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:47,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:47,940 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,940 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743286_2462, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/286_index.zip 2018-07-21T05:32:47,944 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/16d240723e754b4db8404f4bb2e2578d/286_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:47,947 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,948 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743287_2463, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_286.json 2018-07-21T05:32:47,952 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_286.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:47,971 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,971 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743288_2464, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fefa59b650554e1683170a36fe76b899/287_descriptor.json 2018-07-21T05:32:47,976 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fefa59b650554e1683170a36fe76b899/287_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:47,978 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:47,978 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743289_2465, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/287_index.zip 2018-07-21T05:32:47,982 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fefa59b650554e1683170a36fe76b899/287_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:47,985 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:47,985 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743290_2466, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_287.json 2018-07-21T05:32:47,989 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_287.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,006 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,006 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743291_2467, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2256caf1af2941fe94e94833f6d3e9ce/288_descriptor.json
2018-07-21T05:32:48,013 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2256caf1af2941fe94e94833f6d3e9ce/288_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,015 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,016 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743292_2468, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/288_index.zip
2018-07-21T05:32:48,019 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2256caf1af2941fe94e94833f6d3e9ce/288_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,023 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,023 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743293_2469, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_288.json
2018-07-21T05:32:48,027 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_288.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,045 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,045 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743294_2470, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ea3be3b1f24e4fbab8de44707de6fb3c/289_descriptor.json
2018-07-21T05:32:48,049 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ea3be3b1f24e4fbab8de44707de6fb3c/289_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,051 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,052 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743295_2471, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/289_index.zip
2018-07-21T05:32:48,056 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ea3be3b1f24e4fbab8de44707de6fb3c/289_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,059 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,059 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743296_2472, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_289.json
2018-07-21T05:32:48,070 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_289.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
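Each segment number N in this run produces the same trio of files under the Druid staging directory: an N_descriptor.json in a per-segment hash directory and an N_index.zip under the interval/push-time directory (both inside intermediateSegmentDir/<datasource>/), plus a flattened <datasource>_<interval>_<pushTime>_N.json under segmentsDescriptorDir, each written as one replicated block and then closed via completeFile. A hedged Java sketch of how those three paths relate; the class, method, and parameter names here are hypothetical, with only the directory layout read off the log records above:

    // Hedged sketch: reconstructs the per-segment staging paths seen in this
    // log. Not taken from the Hive or Druid source; layout inferred from the
    // allocate/completeFile records.
    final class DruidStagingLayoutSketch {
        static String[] segmentPaths(String stagingRoot, String dataSource,
                                     String segmentHashDir, String intervalDir,
                                     String pushTimeDir, String flatDescriptorName,
                                     int segmentNum) {
            String intermediate = stagingRoot + "/intermediateSegmentDir/" + dataSource;
            return new String[] {
                // e.g. .../<hash>/289_descriptor.json
                intermediate + "/" + segmentHashDir + "/" + segmentNum + "_descriptor.json",
                // e.g. .../19700101T000000.000Z_19700101T010000.000Z/<pushTime>/289_index.zip
                intermediate + "/" + intervalDir + "/" + pushTimeDir + "/" + segmentNum + "_index.zip",
                // e.g. <stagingRoot>/segmentsDescriptorDir/<flattened descriptor name>
                stagingRoot + "/segmentsDescriptorDir/" + flatDescriptorName
            };
        }
    }

The three completeFile records per segment in the log correspond one-to-one to these three paths, which is why the allocate/completeFile cycle repeats in groups of three as the segment counter advances.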
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,096 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,096 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743297_2473, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3a093fcfce154101acfdc883de8a84ea/290_descriptor.json
2018-07-21T05:32:48,100 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3a093fcfce154101acfdc883de8a84ea/290_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,102 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,102 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743298_2474, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/290_index.zip
2018-07-21T05:32:48,106 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3a093fcfce154101acfdc883de8a84ea/290_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,109 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,109 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743299_2475, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_290.json
2018-07-21T05:32:48,113 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_290.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,131 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,131 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743300_2476, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c88a70bdd34f407f8c0274a7c543096d/291_descriptor.json
2018-07-21T05:32:48,135 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c88a70bdd34f407f8c0274a7c543096d/291_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,137 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743301_2477, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/291_index.zip
2018-07-21T05:32:48,141 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c88a70bdd34f407f8c0274a7c543096d/291_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,144 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,144 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743302_2478, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_291.json
2018-07-21T05:32:48,148 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_291.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,165 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,165 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743303_2479, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/47e34bc52cca43e195666498fc0f8a76/292_descriptor.json
2018-07-21T05:32:48,169 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/47e34bc52cca43e195666498fc0f8a76/292_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,171 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,171 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743304_2480, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/292_index.zip
2018-07-21T05:32:48,175 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/47e34bc52cca43e195666498fc0f8a76/292_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,178 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,178 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743305_2481, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_292.json
2018-07-21T05:32:48,185 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_292.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,205 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,205 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,205 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,205 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,205 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,205 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743306_2482, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/69bc90cda14e4c4c93f10a238473a28f/293_descriptor.json
2018-07-21T05:32:48,538 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:48,611 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/69bc90cda14e4c4c93f10a238473a28f/293_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
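The single RenderStrategy$LogToFileFunction record above is Hive's query-progress report interleaved with the NameNode chatter: each vertex is rendered as completed(+running)/total tasks, so "Map 1: 1/1 Reducer 2: 0(+1)/1" reads as the map vertex having finished its one task while the reducer writing these segments still has its one task in flight. A small Java sketch of that notation, assuming the usual completed(+running)/total reading; the class and method names are hypothetical:

    // Sketch of the vertex-progress notation; "(+running)" is shown only
    // while tasks are in flight.
    final class VertexProgressSketch {
        static String render(String vertex, int completed, int running, int total) {
            String inFlight = running > 0 ? "(+" + running + ")" : "";
            return vertex + ": " + completed + inFlight + "/" + total;
        }

        public static void main(String[] args) {
            // Reproduces the record above: "Map 1: 1/1 Reducer 2: 0(+1)/1"
            System.out.println(render("Map 1", 1, 0, 1) + " " + render("Reducer 2", 0, 1, 1));
        }
    }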
2018-07-21T05:32:48,613 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,613 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743307_2483, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/293_index.zip
2018-07-21T05:32:48,618 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/69bc90cda14e4c4c93f10a238473a28f/293_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,621 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,621 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743308_2484, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_293.json
2018-07-21T05:32:48,625 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_293.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,642 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743309_2485, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d2e33052342b4891b05145baff824345/294_descriptor.json
2018-07-21T05:32:48,647 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d2e33052342b4891b05145baff824345/294_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,648 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,649 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,649 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743310_2486, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/294_index.zip
2018-07-21T05:32:48,653 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d2e33052342b4891b05145baff824345/294_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,656 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,656 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743311_2487, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_294.json
2018-07-21T05:32:48,661 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_294.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,679 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,679 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743312_2488, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a3530b59100141a1b5fd30065ea30223/295_descriptor.json
2018-07-21T05:32:48,687 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a3530b59100141a1b5fd30065ea30223/295_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:48,689 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,689 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743313_2489, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/295_index.zip
2018-07-21T05:32:48,693 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a3530b59100141a1b5fd30065ea30223/295_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,696 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,696 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,696 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,697 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,697 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,697 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,697 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:48,697 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,697 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,697 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743314_2490, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_295.json
2018-07-21T05:32:48,701 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_295.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:48,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,719 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743315_2491, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfaeeedc5a9b4fc88e5c23870a6059f9/296_descriptor.json
2018-07-21T05:32:48,723 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfaeeedc5a9b4fc88e5c23870a6059f9/296_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:48,725 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:48,725 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743316_2492, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/296_index.zip 2018-07-21T05:32:48,728 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cfaeeedc5a9b4fc88e5c23870a6059f9/296_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:48,731 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:48,731 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743317_2493, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_296.json 2018-07-21T05:32:48,735 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_296.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:48,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:48,764 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743318_2494, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a4ee910cd6904a8680ed2768a5d3bd53/297_descriptor.json 2018-07-21T05:32:48,771 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a4ee910cd6904a8680ed2768a5d3bd53/297_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:48,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:48,774 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743319_2495, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/297_index.zip 2018-07-21T05:32:48,787 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a4ee910cd6904a8680ed2768a5d3bd53/297_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:48,791 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:48,791 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743320_2496, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_297.json 2018-07-21T05:32:48,795 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_297.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:48,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
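[Editor's note] Each segment number in this run produces the same client-side rhythm: the reducer's DFSClient creates a small staging file (a descriptor.json or an index.zip), the NameNode allocates one block for it (the chooseRandom calls above pick its three replicas), and closing the stream yields the "DIR* completeFile" entry. A minimal sketch of a write that would generate one such allocate/completeFile pair (assumptions: the Configuration is taken to carry an fs.defaultFS pointing at this mini cluster's NameNode, and the path is an illustrative placeholder, not one from this log):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StagingWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // assumed to point at the test cluster
        FileSystem fs = FileSystem.get(conf);
        Path descriptor = new Path("/tmp/druidStagingDir/example_descriptor.json"); // hypothetical path
        // Writing the first bytes makes the client ask the NameNode for a block,
        // which shows up in this log as "BLOCK* allocate blk_..., replicas=...".
        try (FSDataOutputStream out = fs.create(descriptor, true)) {
            out.writeBytes("{}");
        }
        // close() completes the file: "DIR* completeFile: ... is closed by DFSClient_...".
    }
}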
2018-07-21T05:32:48,816 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,816 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743321_2497, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a6998a4ce8e44659cb1d8a935ec0ed9/298_descriptor.json
2018-07-21T05:32:48,820 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a6998a4ce8e44659cb1d8a935ec0ed9/298_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,822 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,822 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743322_2498, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/298_index.zip
2018-07-21T05:32:48,827 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0a6998a4ce8e44659cb1d8a935ec0ed9/298_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,830 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,830 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743323_2499, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_298.json
2018-07-21T05:32:48,838 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_298.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,855 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,855 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,856 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,856 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743324_2500, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/88c00b5c661f4c3fb6ec1c57f1a8cf54/299_descriptor.json
2018-07-21T05:32:48,860 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/88c00b5c661f4c3fb6ec1c57f1a8cf54/299_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,862 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,862 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743325_2501, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/299_index.zip
2018-07-21T05:32:48,866 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/88c00b5c661f4c3fb6ec1c57f1a8cf54/299_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:48,869 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,870 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743326_2502, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_299.json
2018-07-21T05:32:48,874 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_299.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:48,893 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,894 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743327_2503, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/387c4e373b6b4aeeb26dc88c53d16aa7/300_descriptor.json
2018-07-21T05:32:48,898 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/387c4e373b6b4aeeb26dc88c53d16aa7/300_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,900 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,900 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,900 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,900 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,900 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,901 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,901 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:48,901 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,901 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,901 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,901 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,901 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743328_2504, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/300_index.zip
2018-07-21T05:32:48,905 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/387c4e373b6b4aeeb26dc88c53d16aa7/300_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,908 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,908 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743329_2505, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_300.json
2018-07-21T05:32:48,912 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_300.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:48,930 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,930 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743330_2506, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4504a9e5b46a48e8b22b25fe33ef4c1d/301_descriptor.json
2018-07-21T05:32:48,934 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4504a9e5b46a48e8b22b25fe33ef4c1d/301_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,936 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,936 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743331_2507, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/301_index.zip
2018-07-21T05:32:48,940 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4504a9e5b46a48e8b22b25fe33ef4c1d/301_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,943 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,944 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:48,944 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,944 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:48,944 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,944 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743332_2508, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_301.json
2018-07-21T05:32:48,947 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_301.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,964 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,964 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,965 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:48,965 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:48,965 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,965 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,965 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:48,965 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,965 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743333_2509, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9c320b24b98e45a6811ec5020636edb2/302_descriptor.json
2018-07-21T05:32:48,969 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9c320b24b98e45a6811ec5020636edb2/302_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:48,971 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,971 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743334_2510, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/302_index.zip
2018-07-21T05:32:48,975 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9c320b24b98e45a6811ec5020636edb2/302_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:48,978 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,978 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743335_2511, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_302.json
2018-07-21T05:32:48,982 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_302.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:48,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:48,999 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743336_2512, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/053fe9de570946798a966f12fc4a2c0c/303_descriptor.json
2018-07-21T05:32:49,003 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/053fe9de570946798a966f12fc4a2c0c/303_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:49,005 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:49,006 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743337_2513, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/303_index.zip 2018-07-21T05:32:49,009 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/053fe9de570946798a966f12fc4a2c0c/303_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:49,012 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:49,012 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743338_2514, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_303.json 2018-07-21T05:32:49,016 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_303.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,033 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:49,034 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:49,034 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:49,034 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743339_2515, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e4619980eee7403797bd586c35ec49d1/304_descriptor.json 2018-07-21T05:32:49,038 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e4619980eee7403797bd586c35ec49d1/304_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:49,040 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:49,040 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743340_2516, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/304_index.zip 2018-07-21T05:32:49,046 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e4619980eee7403797bd586c35ec49d1/304_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:49,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,050 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743341_2517, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_304.json
2018-07-21T05:32:49,237 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:32:49,300 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:32:49,455 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_304.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,482 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,482 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743342_2518, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/40d33fcb277946299fdf96c630e2d717/305_descriptor.json
2018-07-21T05:32:49,487 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/40d33fcb277946299fdf96c630e2d717/305_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,489 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,489 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743343_2519, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/305_index.zip
2018-07-21T05:32:49,494 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/40d33fcb277946299fdf96c630e2d717/305_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,497 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,497 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743344_2520, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_305.json
2018-07-21T05:32:49,501 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_305.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:49,522 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,522 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743345_2521, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f73873005d5b4bc9b2ef520c07717f07/306_descriptor.json
2018-07-21T05:32:49,527 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f73873005d5b4bc9b2ef520c07717f07/306_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:49,530 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,530 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743346_2522, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/306_index.zip
2018-07-21T05:32:49,536 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f73873005d5b4bc9b2ef520c07717f07/306_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:49,539 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,539 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743347_2523, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_306.json
2018-07-21T05:32:49,544 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_306.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,567 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743348_2524, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0c0f5c3b1c664085b1dc4372eeb8a5c2/307_descriptor.json
2018-07-21T05:32:49,572 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0c0f5c3b1c664085b1dc4372eeb8a5c2/307_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:49,574 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,574 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743349_2525, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/307_index.zip
2018-07-21T05:32:49,579 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0c0f5c3b1c664085b1dc4372eeb8a5c2/307_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:49,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,583 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743350_2526, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_307.json
2018-07-21T05:32:49,588 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_307.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,607 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,608 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743351_2527, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/322a007622d04b10a2ee163ee46ba23c/308_descriptor.json
2018-07-21T05:32:49,612 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/322a007622d04b10a2ee163ee46ba23c/308_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,614 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743352_2528, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/308_index.zip
2018-07-21T05:32:49,618 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/322a007622d04b10a2ee163ee46ba23c/308_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,622 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,622 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743353_2529, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_308.json
2018-07-21T05:32:49,627 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_308.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,646 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,646 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743354_2530, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/697265ea16ca41eb848dc1a0bf69a98b/309_descriptor.json
2018-07-21T05:32:49,651 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/697265ea16ca41eb848dc1a0bf69a98b/309_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,654 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,654 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743355_2531, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/309_index.zip
2018-07-21T05:32:49,658 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/697265ea16ca41eb848dc1a0bf69a98b/309_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,661 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,662 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743356_2532, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_309.json
2018-07-21T05:32:49,668 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_309.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:49,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,689 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743357_2533, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b7bdd3f7c3e4388a0feab1a6c25fc06/310_descriptor.json
2018-07-21T05:32:49,695 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b7bdd3f7c3e4388a0feab1a6c25fc06/310_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:49,697 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,697 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743358_2534, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/310_index.zip
2018-07-21T05:32:49,705 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2b7bdd3f7c3e4388a0feab1a6c25fc06/310_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:49,710 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,710 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743359_2535, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_310.json
2018-07-21T05:32:49,717 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_310.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:49,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,738 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743360_2536, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90e8cb8f50be44ab8a1c4ce7b8010c07/311_descriptor.json
2018-07-21T05:32:49,742 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90e8cb8f50be44ab8a1c4ce7b8010c07/311_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:49,744 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,744 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743361_2537, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/311_index.zip
2018-07-21T05:32:49,749 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/90e8cb8f50be44ab8a1c4ce7b8010c07/311_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,755 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,755 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743362_2538, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_311.json
2018-07-21T05:32:49,761 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_311.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,784 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:49,785 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,785 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743363_2539, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5c5fc423f3364ad19ef5419e94a9bd86/312_descriptor.json
2018-07-21T05:32:49,790 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5c5fc423f3364ad19ef5419e94a9bd86/312_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,792 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,792 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743364_2540, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/312_index.zip
2018-07-21T05:32:49,797 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5c5fc423f3364ad19ef5419e94a9bd86/312_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:49,801 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,801 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743365_2541, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_312.json
2018-07-21T05:32:49,809 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_312.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,836 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,836 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,836 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,837 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,837 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743366_2542, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3f20c752cede45efa629a01af5839b4c/313_descriptor.json
2018-07-21T05:32:49,841 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3f20c752cede45efa629a01af5839b4c/313_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,843 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743367_2543, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/313_index.zip
2018-07-21T05:32:49,848 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3f20c752cede45efa629a01af5839b4c/313_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,855 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,856 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743368_2544, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_313.json
2018-07-21T05:32:49,861 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_313.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,881 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,882 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,882 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743369_2545, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5a6c10d877e041ba9a357da6652aead5/314_descriptor.json
2018-07-21T05:32:49,897 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5a6c10d877e041ba9a357da6652aead5/314_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,900 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,900 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743370_2546, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/314_index.zip
2018-07-21T05:32:49,912 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5a6c10d877e041ba9a357da6652aead5/314_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,916 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743371_2547, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_314.json
2018-07-21T05:32:49,938 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_314.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,962 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:49,962 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743372_2548, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a26557f1d1404d3b838fd14f1ec933cb/315_descriptor.json
2018-07-21T05:32:49,971 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a26557f1d1404d3b838fd14f1ec933cb/315_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:49,976 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:49,976 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743373_2549, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/315_index.zip
2018-07-21T05:32:49,985 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a26557f1d1404d3b838fd14f1ec933cb/315_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:49,989 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:49,990 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743374_2550, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_315.json
2018-07-21T05:32:49,994 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_315.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,011 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,011 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743375_2551, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fd8450feba8c4587870eb1fbc03aaf14/316_descriptor.json
2018-07-21T05:32:50,016 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fd8450feba8c4587870eb1fbc03aaf14/316_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,018 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,018 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,018 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:50,018 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,018 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:50,019 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,019 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743376_2552, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/316_index.zip
2018-07-21T05:32:50,023 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fd8450feba8c4587870eb1fbc03aaf14/316_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,026 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,026 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743377_2553, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_316.json
2018-07-21T05:32:50,032 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_316.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,050 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,050 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743378_2554, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db7da2a305ef4cb886eee099a6e5f1bb/317_descriptor.json
2018-07-21T05:32:50,054 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db7da2a305ef4cb886eee099a6e5f1bb/317_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,056 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,056 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,057 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,057 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743379_2555, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/317_index.zip
2018-07-21T05:32:50,065 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db7da2a305ef4cb886eee099a6e5f1bb/317_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:50,069 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,069 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743380_2556, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_317.json
2018-07-21T05:32:50,079 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_317.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:50,097 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:50,097 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743381_2557, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49bd94c31b1d41399ecaf546d2546391/318_descriptor.json
2018-07-21T05:32:50,101 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49bd94c31b1d41399ecaf546d2546391/318_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:50,103 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,103 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743382_2558, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/318_index.zip
2018-07-21T05:32:50,108 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49bd94c31b1d41399ecaf546d2546391/318_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,111 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:50,112 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743383_2559, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_318.json
2018-07-21T05:32:50,121 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_318.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:50,140 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:50,140 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743384_2560, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d29708f37d3441d2bb2fa933fa6529a3/319_descriptor.json
2018-07-21T05:32:50,547 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d29708f37d3441d2bb2fa933fa6529a3/319_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:50,552 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,553 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743385_2561, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/319_index.zip
2018-07-21T05:32:50,558 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d29708f37d3441d2bb2fa933fa6529a3/319_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:50,565 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:50,566 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743386_2562, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_319.json
2018-07-21T05:32:50,972 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_319.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:50,999 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:51,000 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,000 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743387_2563, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7ce5d116a84e42b3b0b8d354cc1e342d/320_descriptor.json
2018-07-21T05:32:51,004 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7ce5d116a84e42b3b0b8d354cc1e342d/320_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:51,006 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,006 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743388_2564, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/320_index.zip
2018-07-21T05:32:51,010 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7ce5d116a84e42b3b0b8d354cc1e342d/320_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,014 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,014 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743389_2565, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_320.json
2018-07-21T05:32:51,019 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_320.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:51,041 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,041 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743390_2566, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a7be28466eed427e895a5b3d157f6342/321_descriptor.json 2018-07-21T05:32:51,046 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a7be28466eed427e895a5b3d157f6342/321_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:51,048 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,048 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743391_2567, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/321_index.zip 2018-07-21T05:32:51,052 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a7be28466eed427e895a5b3d157f6342/321_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:51,056 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,056 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743392_2568, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_321.json 2018-07-21T05:32:51,061 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_321.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:51,083 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,083 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743393_2569, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/63f4b069d6b54abe85b166ddb55b3ad3/322_descriptor.json 2018-07-21T05:32:51,090 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/63f4b069d6b54abe85b166ddb55b3ad3/322_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:51,092 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,092 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743394_2570, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/322_index.zip 2018-07-21T05:32:51,097 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/63f4b069d6b54abe85b166ddb55b3ad3/322_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:51,101 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,101 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743395_2571, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_322.json 2018-07-21T05:32:51,109 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_322.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:51,128 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,129 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743396_2572, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09d1d1af88d341ada65a280946df1276/323_descriptor.json 2018-07-21T05:32:51,133 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09d1d1af88d341ada65a280946df1276/323_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:51,135 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,135 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743397_2573, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/323_index.zip 2018-07-21T05:32:51,139 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09d1d1af88d341ada65a280946df1276/323_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:51,142 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,142 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743398_2574, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_323.json 2018-07-21T05:32:51,146 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_323.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:51,163 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,163 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743399_2575, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f01c52b0affc47fda7107340a90a9a92/324_descriptor.json 2018-07-21T05:32:51,167 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f01c52b0affc47fda7107340a90a9a92/324_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:51,169 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,169 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743400_2576, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/324_index.zip 2018-07-21T05:32:51,173 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f01c52b0affc47fda7107340a90a9a92/324_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:51,176 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,176 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743401_2577, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_324.json 2018-07-21T05:32:51,180 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_324.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:51,197 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,198 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743402_2578, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ba31305cd7584104971b276b3a05ef2f/325_descriptor.json 2018-07-21T05:32:51,202 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ba31305cd7584104971b276b3a05ef2f/325_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:51,204 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,204 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743403_2579, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/325_index.zip 2018-07-21T05:32:51,208 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ba31305cd7584104971b276b3a05ef2f/325_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:51,211 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,211 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743404_2580, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_325.json 2018-07-21T05:32:51,214 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_325.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:51,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,233 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743405_2581, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7a24636b85aa447fa4d9b833c5268d0f/326_descriptor.json 2018-07-21T05:32:51,238 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7a24636b85aa447fa4d9b833c5268d0f/326_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:51,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,239 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743406_2582, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/326_index.zip 2018-07-21T05:32:51,243 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7a24636b85aa447fa4d9b833c5268d0f/326_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:51,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,246 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743407_2583, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_326.json 2018-07-21T05:32:51,249 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_326.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:51,266 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,266 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743408_2584, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e5c0ce95dff34d36aba12ad31226f9e7/327_descriptor.json 2018-07-21T05:32:51,271 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e5c0ce95dff34d36aba12ad31226f9e7/327_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,272 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,272 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:51,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,273 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743409_2585, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/327_index.zip 2018-07-21T05:32:51,276 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e5c0ce95dff34d36aba12ad31226f9e7/327_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:51,279 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,279 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743410_2586, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_327.json 2018-07-21T05:32:51,283 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_327.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:51,304 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,304 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743411_2587, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/debda0ceafbd4b39acc40db5f3aee5a6/328_descriptor.json 2018-07-21T05:32:51,308 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/debda0ceafbd4b39acc40db5f3aee5a6/328_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:51,310 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,310 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743412_2588, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/328_index.zip 2018-07-21T05:32:51,314 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/debda0ceafbd4b39acc40db5f3aee5a6/328_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:51,317 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,317 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743413_2589, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_328.json 2018-07-21T05:32:51,321 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_328.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:51,338 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,338 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743414_2590, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c39c03d65e23414e8b5995314a82f6ef/329_descriptor.json 2018-07-21T05:32:51,342 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c39c03d65e23414e8b5995314a82f6ef/329_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,343 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:51,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:51,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:51,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:51,344 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:51,344 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743415_2591, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/329_index.zip 2018-07-21T05:32:51,387 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c39c03d65e23414e8b5995314a82f6ef/329_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:51,391 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,391 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743416_2592, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_329.json
2018-07-21T05:32:51,394 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_329.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:51,410 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,410 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743417_2593, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/61859c7aaa404721b107a9cdf37284be/330_descriptor.json
2018-07-21T05:32:51,413 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/61859c7aaa404721b107a9cdf37284be/330_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:51,417 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,417 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743418_2594, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/330_index.zip
2018-07-21T05:32:51,420 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/61859c7aaa404721b107a9cdf37284be/330_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:51,423 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,423 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743419_2595, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_330.json
2018-07-21T05:32:51,427 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_330.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,443 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,443 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743420_2596, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dc183fae0982441985ad3838c64599c9/331_descriptor.json
2018-07-21T05:32:51,447 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dc183fae0982441985ad3838c64599c9/331_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,449 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,449 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743421_2597, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/331_index.zip
2018-07-21T05:32:51,453 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dc183fae0982441985ad3838c64599c9/331_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:51,456 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,456 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743422_2598, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_331.json
2018-07-21T05:32:51,460 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_331.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:51,476 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,476 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743423_2599, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8e8cbef867d64a59982d3255725b7f43/332_descriptor.json
2018-07-21T05:32:51,481 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8e8cbef867d64a59982d3255725b7f43/332_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:51,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,483 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743424_2600, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/332_index.zip
2018-07-21T05:32:51,487 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8e8cbef867d64a59982d3255725b7f43/332_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:51,490 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,490 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743425_2601, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_332.json
2018-07-21T05:32:51,494 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_332.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,510 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,511 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743426_2602, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09055963d02346d7821a39727a63c9a4/333_descriptor.json
2018-07-21T05:32:51,515 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09055963d02346d7821a39727a63c9a4/333_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,516 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,516 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,516 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:51,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,517 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743427_2603, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/333_index.zip
2018-07-21T05:32:51,520 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09055963d02346d7821a39727a63c9a4/333_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,523 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,524 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743428_2604, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_333.json
2018-07-21T05:32:51,527 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_333.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:51,544 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,544 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743429_2605, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b9af55f1f59d4db9b9781e16ad520daa/334_descriptor.json
2018-07-21T05:32:51,550 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b9af55f1f59d4db9b9781e16ad520daa/334_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,551 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:51,552 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,552 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743430_2606, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/334_index.zip
2018-07-21T05:32:51,559 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b9af55f1f59d4db9b9781e16ad520daa/334_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:51,562 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,562 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743431_2607, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_334.json
2018-07-21T05:32:51,566 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_334.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:51,582 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,582 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743432_2608, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/17403d0fcfe044feb73fb6a68528ffb4/335_descriptor.json
2018-07-21T05:32:51,586 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/17403d0fcfe044feb73fb6a68528ffb4/335_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:51,588 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,588 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743433_2609, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/335_index.zip
2018-07-21T05:32:51,591 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/17403d0fcfe044feb73fb6a68528ffb4/335_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,594 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,594 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743434_2610, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_335.json
2018-07-21T05:32:51,598 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_335.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,618 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,618 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,618 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:51,618 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:51,619 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,619 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,619 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,619 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:51,619 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,619 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743435_2611, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7ffdf1317a5a42ccb152fb336641f746/336_descriptor.json
2018-07-21T05:32:51,623 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7ffdf1317a5a42ccb152fb336641f746/336_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:51,624 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,624 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743436_2612, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/336_index.zip
2018-07-21T05:32:51,629 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7ffdf1317a5a42ccb152fb336641f746/336_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,632 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,632 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,632 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:51,632 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,632 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743437_2613, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_336.json
2018-07-21T05:32:51,636 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_336.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:51,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,656 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743438_2614, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5480223182049f19750807e446e80e6/337_descriptor.json
2018-07-21T05:32:51,663 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5480223182049f19750807e446e80e6/337_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:51,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:51,668 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743439_2615, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/337_index.zip
2018-07-21T05:32:51,673 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5480223182049f19750807e446e80e6/337_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:51,676 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,676 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743440_2616, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_337.json
2018-07-21T05:32:51,679 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_337.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,695 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:51,696 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:51,696 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743441_2617, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/06a83a0fc9ea43eb8ad4cfc6cd478391/338_descriptor.json
2018-07-21T05:32:51,700 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/06a83a0fc9ea43eb8ad4cfc6cd478391/338_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:51,701 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,701 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:51,702 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:51,702 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743442_2618, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/338_index.zip
2018-07-21T05:32:52,106 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/06a83a0fc9ea43eb8ad4cfc6cd478391/338_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:52,110 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,110 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743443_2619, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_338.json
2018-07-21T05:32:52,114 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_338.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:52,132 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,132 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743444_2620, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bd0f9a84b4a74680a1891a9b28c01c28/339_descriptor.json
2018-07-21T05:32:52,136 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bd0f9a84b4a74680a1891a9b28c01c28/339_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:52,138 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,138 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743445_2621, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/339_index.zip
2018-07-21T05:32:52,544 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bd0f9a84b4a74680a1891a9b28c01c28/339_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:52,548 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,548 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743446_2622, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_339.json
2018-07-21T05:32:52,553 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_339.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,569 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,569 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,569 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:52,570 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,570 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743447_2623, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f00c998e4a04899b6e0f797d57a297a/340_descriptor.json
2018-07-21T05:32:52,573 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f00c998e4a04899b6e0f797d57a297a/340_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:52,575 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,575 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743448_2624, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/340_index.zip
2018-07-21T05:32:52,579 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f00c998e4a04899b6e0f797d57a297a/340_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:52,582 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,582 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743449_2625, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_340.json
2018-07-21T05:32:52,586 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_340.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:52,603 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,603 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743450_2626, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6b46037ec80242538643d8387556533e/341_descriptor.json
2018-07-21T05:32:52,607 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6b46037ec80242538643d8387556533e/341_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:52,609 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,609 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743451_2627, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/341_index.zip
2018-07-21T05:32:52,613 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6b46037ec80242538643d8387556533e/341_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:52,616 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,616 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743452_2628, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_341.json
2018-07-21T05:32:52,620 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_341.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:52,637 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,637 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743453_2629, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0075edf449a448018fe1adfa8964cc29/342_descriptor.json
2018-07-21T05:32:52,641 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0075edf449a448018fe1adfa8964cc29/342_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:52,643 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,643 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743454_2630, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/342_index.zip
2018-07-21T05:32:52,647 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0075edf449a448018fe1adfa8964cc29/342_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:52,650 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,650 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743455_2631, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_342.json
2018-07-21T05:32:52,654 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_342.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:52,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,671 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743456_2632, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5d53ebf54407445fa292cee765ba9ec6/343_descriptor.json
2018-07-21T05:32:52,675 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5d53ebf54407445fa292cee765ba9ec6/343_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:52,677 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,678 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743457_2633, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/343_index.zip
2018-07-21T05:32:52,681 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5d53ebf54407445fa292cee765ba9ec6/343_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:52,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,684 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743458_2634, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_343.json
2018-07-21T05:32:52,688 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_343.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:52,708 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,708 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743459_2635, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6678363ec8c4025ba10b964dc6b80cc/344_descriptor.json
2018-07-21T05:32:52,712 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6678363ec8c4025ba10b964dc6b80cc/344_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:52,714 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,714 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743460_2636, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/344_index.zip
2018-07-21T05:32:52,718 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b6678363ec8c4025ba10b964dc6b80cc/344_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:52,721 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,721 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743461_2637, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_344.json
2018-07-21T05:32:52,725 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_344.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:52,741 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,741 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743462_2638, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4e602bd90a7a4207a218b29ed4a7cab2/345_descriptor.json
2018-07-21T05:32:52,745 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4e602bd90a7a4207a218b29ed4a7cab2/345_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:52,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:52,746 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743463_2639, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/345_index.zip
2018-07-21T05:32:52,750 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4e602bd90a7a4207a218b29ed4a7cab2/345_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:52,753 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:52,753 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743464_2640, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_345.json
2018-07-21T05:32:53,157 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_345.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:53,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:53,174 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743465_2641, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2f28e904df5946929e51b63db4055c90/346_descriptor.json
2018-07-21T05:32:53,178 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2f28e904df5946929e51b63db4055c90/346_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:53,180 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:53,180 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743466_2642, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/346_index.zip
2018-07-21T05:32:53,183 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2f28e904df5946929e51b63db4055c90/346_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,186 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:53,187 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,187 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,187 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743467_2643, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_346.json 2018-07-21T05:32:53,190 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_346.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,207 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,207 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743468_2644, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9beb01395a684beb81f6503cb5eabc4a/347_descriptor.json 2018-07-21T05:32:53,211 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9beb01395a684beb81f6503cb5eabc4a/347_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:53,213 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,213 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743469_2645, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/347_index.zip 2018-07-21T05:32:53,216 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9beb01395a684beb81f6503cb5eabc4a/347_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:53,219 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,219 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743470_2646, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_347.json 2018-07-21T05:32:53,222 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_347.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,242 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,243 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,243 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,243 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,243 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743471_2647, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2994426e92747f8a6a6e503cdfc33ea/348_descriptor.json 2018-07-21T05:32:53,247 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2994426e92747f8a6a6e503cdfc33ea/348_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,249 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,249 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743472_2648, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/348_index.zip 2018-07-21T05:32:53,253 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2994426e92747f8a6a6e503cdfc33ea/348_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,257 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,257 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743473_2649, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_348.json 2018-07-21T05:32:53,262 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_348.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:53,279 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,279 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743474_2650, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e8b2c65ae87644c086351e16db9c109b/349_descriptor.json 2018-07-21T05:32:53,283 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e8b2c65ae87644c086351e16db9c109b/349_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:53,284 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,284 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743475_2651, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/349_index.zip 2018-07-21T05:32:53,288 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e8b2c65ae87644c086351e16db9c109b/349_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:53,291 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,291 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743476_2652, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_349.json 2018-07-21T05:32:53,294 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_349.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:53,312 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,312 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743477_2653, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b4c13597fef40b1891e4b36cfea8e7b/350_descriptor.json 2018-07-21T05:32:53,316 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b4c13597fef40b1891e4b36cfea8e7b/350_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:53,318 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,318 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743478_2654, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/350_index.zip 2018-07-21T05:32:53,322 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5b4c13597fef40b1891e4b36cfea8e7b/350_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,324 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,324 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:53,325 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,325 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743479_2655, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_350.json 2018-07-21T05:32:53,329 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_350.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:53,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,350 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743480_2656, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ce71e65e3f0453f834edf616231971e/351_descriptor.json 2018-07-21T05:32:53,354 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ce71e65e3f0453f834edf616231971e/351_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,355 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:53,355 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,355 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,355 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,355 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,356 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,356 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,356 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,356 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743481_2657, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/351_index.zip 2018-07-21T05:32:53,359 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6ce71e65e3f0453f834edf616231971e/351_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,362 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,362 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,363 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,363 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,363 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,363 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,363 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:53,363 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,363 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743482_2658, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_351.json 2018-07-21T05:32:53,367 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_351.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,384 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,384 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743483_2659, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b582446854194ab7ae08263341cee3ed/352_descriptor.json 2018-07-21T05:32:53,390 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b582446854194ab7ae08263341cee3ed/352_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,392 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,392 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743484_2660, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/352_index.zip 2018-07-21T05:32:53,396 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b582446854194ab7ae08263341cee3ed/352_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:53,399 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,399 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743485_2661, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_352.json 2018-07-21T05:32:53,403 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_352.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,419 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,419 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743486_2662, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ca694f82645e41558d82827e0e074d31/353_descriptor.json 2018-07-21T05:32:53,424 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ca694f82645e41558d82827e0e074d31/353_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:53,430 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,430 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743487_2663, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/353_index.zip 2018-07-21T05:32:53,434 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ca694f82645e41558d82827e0e074d31/353_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:53,437 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,437 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743488_2664, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_353.json 2018-07-21T05:32:53,446 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_353.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,463 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,464 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743489_2665, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f0ce1a5964f44c26ac1144b8f544ce55/354_descriptor.json 2018-07-21T05:32:53,468 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f0ce1a5964f44c26ac1144b8f544ce55/354_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,475 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,475 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743490_2666, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/354_index.zip 2018-07-21T05:32:53,480 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f0ce1a5964f44c26ac1144b8f544ce55/354_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:53,485 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,486 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743491_2667, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_354.json 2018-07-21T05:32:53,891 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_354.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:53,909 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,909 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743492_2668, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38a414a80f7e44b99aaf1e937f814d37/355_descriptor.json 2018-07-21T05:32:53,913 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38a414a80f7e44b99aaf1e937f814d37/355_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,914 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,914 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,914 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,914 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:53,914 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:53,915 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,915 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,915 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,915 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:53,915 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:53,915 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,915 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743493_2669, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/355_index.zip 2018-07-21T05:32:53,918 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/38a414a80f7e44b99aaf1e937f814d37/355_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:53,921 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,921 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743494_2670, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_355.json 2018-07-21T05:32:53,925 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_355.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,942 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,942 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743495_2671, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9a363351edd74df3a5acd9dc540059c2/356_descriptor.json 2018-07-21T05:32:53,949 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9a363351edd74df3a5acd9dc540059c2/356_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:53,951 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:53,951 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743496_2672, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/356_index.zip 2018-07-21T05:32:53,957 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9a363351edd74df3a5acd9dc540059c2/356_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:53,960 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,960 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743497_2673, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_356.json 2018-07-21T05:32:53,964 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_356.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,981 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:53,981 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743498_2674, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2db67816d0149ac8e74d8b3561b977c/357_descriptor.json 2018-07-21T05:32:53,985 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2db67816d0149ac8e74d8b3561b977c/357_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:53,987 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,987 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743499_2675, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/357_index.zip 2018-07-21T05:32:53,993 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b2db67816d0149ac8e74d8b3561b977c/357_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:53,995 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:53,996 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:53,996 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743500_2676, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_357.json 2018-07-21T05:32:53,999 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_357.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,016 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:54,016 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,017 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:54,017 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,017 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,017 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,017 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:54,017 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,017 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743501_2677, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d3eb5ed516374105be4ec0bc67dfb5c8/358_descriptor.json 2018-07-21T05:32:54,021 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d3eb5ed516374105be4ec0bc67dfb5c8/358_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,022 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,022 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,023 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,023 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,023 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,023 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:54,023 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:54,023 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,023 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743502_2678, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/358_index.zip 2018-07-21T05:32:54,030 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d3eb5ed516374105be4ec0bc67dfb5c8/358_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:54,033 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,033 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743503_2679, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_358.json 2018-07-21T05:32:54,037 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_358.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:54,057 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,057 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743504_2680, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/65513ba09e33416188f757b10cfd71eb/359_descriptor.json 2018-07-21T05:32:54,064 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/65513ba09e33416188f757b10cfd71eb/359_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:54,066 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,067 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743505_2681, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/359_index.zip 2018-07-21T05:32:54,071 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/65513ba09e33416188f757b10cfd71eb/359_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:54,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,074 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743506_2682, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_359.json 2018-07-21T05:32:54,078 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_359.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,095 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:54,096 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,096 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743507_2683, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f483e450b154687baef013ecfb6d8c9/360_descriptor.json 2018-07-21T05:32:54,100 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f483e450b154687baef013ecfb6d8c9/360_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:54,101 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,101 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743508_2684, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/360_index.zip 2018-07-21T05:32:54,105 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f483e450b154687baef013ecfb6d8c9/360_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:54,108 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,108 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743509_2685, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_360.json 2018-07-21T05:32:54,112 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_360.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:54,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,129 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743510_2686, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/73ab03533899434f90cc1c953205773b/361_descriptor.json 2018-07-21T05:32:54,133 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/73ab03533899434f90cc1c953205773b/361_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,134 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,134 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,134 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:54,134 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,135 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,135 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:54,135 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:54,135 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,135 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743511_2687, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/361_index.zip 2018-07-21T05:32:54,140 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/73ab03533899434f90cc1c953205773b/361_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:54,143 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,143 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743512_2688, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_361.json 2018-07-21T05:32:54,147 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_361.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:54,164 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,164 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743513_2689, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b34061bb0bc544a1af37c3f22220a2d4/362_descriptor.json 2018-07-21T05:32:54,168 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b34061bb0bc544a1af37c3f22220a2d4/362_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:54,170 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,170 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743514_2690, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/362_index.zip 2018-07-21T05:32:54,173 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b34061bb0bc544a1af37c3f22220a2d4/362_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:54,176 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,176 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743515_2691, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_362.json 2018-07-21T05:32:54,180 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_362.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:54,197 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,197 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743516_2692, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/37200f920ee1494985f8c0d70d35058f/363_descriptor.json 2018-07-21T05:32:54,201 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/37200f920ee1494985f8c0d70d35058f/363_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:54,203 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:54,203 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743517_2693, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/363_index.zip 2018-07-21T05:32:54,207 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/37200f920ee1494985f8c0d70d35058f/363_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,210 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,210 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743518_2694, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_363.json
2018-07-21T05:32:54,213 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_363.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:54,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,232 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743519_2695, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/762762932db146208c1734f09b6297ac/364_descriptor.json
2018-07-21T05:32:54,236 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/762762932db146208c1734f09b6297ac/364_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:54,238 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,238 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743520_2696, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/364_index.zip
2018-07-21T05:32:54,241 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/762762932db146208c1734f09b6297ac/364_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,244 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:54,245 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,245 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743521_2697, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_364.json
2018-07-21T05:32:54,249 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_364.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:54,268 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,268 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743522_2698, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/85c2b1dc6baf47fb9e9cd2d97dcaba5a/365_descriptor.json
2018-07-21T05:32:54,272 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/85c2b1dc6baf47fb9e9cd2d97dcaba5a/365_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,273 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,274 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,274 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743523_2699, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/365_index.zip
2018-07-21T05:32:54,278 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/85c2b1dc6baf47fb9e9cd2d97dcaba5a/365_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:54,281 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,281 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743524_2700, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_365.json
2018-07-21T05:32:54,285 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_365.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,304 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,304 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,305 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:54,305 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,305 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,305 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,305 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:54,305 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,305 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743525_2701, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/18932b2ce0364697b791f9efb087ce43/366_descriptor.json
2018-07-21T05:32:54,309 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/18932b2ce0364697b791f9efb087ce43/366_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:54,311 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,311 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743526_2702, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/366_index.zip
2018-07-21T05:32:54,316 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/18932b2ce0364697b791f9efb087ce43/366_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:54,319 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,320 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743527_2703, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_366.json
2018-07-21T05:32:54,329 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_366.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,352 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:54,352 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743528_2704, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a959f49cc01f4640babf29b653499c46/367_descriptor.json
2018-07-21T05:32:54,356 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a959f49cc01f4640babf29b653499c46/367_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,358 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:54,358 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743529_2705, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/367_index.zip
2018-07-21T05:32:54,367 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a959f49cc01f4640babf29b653499c46/367_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,371 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:54,371 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743530_2706, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_367.json
2018-07-21T05:32:54,375 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_367.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:54,400 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:54,400 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743531_2707, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e1a19c10a5c048818c1aa037bb7f17fe/368_descriptor.json
2018-07-21T05:32:54,404 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e1a19c10a5c048818c1aa037bb7f17fe/368_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:54,406 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,406 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743532_2708, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/368_index.zip
2018-07-21T05:32:54,410 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e1a19c10a5c048818c1aa037bb7f17fe/368_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:54,414 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:54,414 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743533_2709, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_368.json
2018-07-21T05:32:54,563 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:54,819 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_368.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:54,999 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:54,999 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743534_2710, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c50b38f8e19e4d43b8359e871c21af74/369_descriptor.json
2018-07-21T05:32:55,004 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c50b38f8e19e4d43b8359e871c21af74/369_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,006 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,006 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743535_2711, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/369_index.zip
2018-07-21T05:32:55,010 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c50b38f8e19e4d43b8359e871c21af74/369_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,013 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,014 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,014 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743536_2712, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_369.json
2018-07-21T05:32:55,018 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_369.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,036 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,036 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743537_2713, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4acdaf5d553143e1bcd03453e943afce/370_descriptor.json
2018-07-21T05:32:55,040 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4acdaf5d553143e1bcd03453e943afce/370_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,042 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743538_2714, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/370_index.zip
2018-07-21T05:32:55,046 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4acdaf5d553143e1bcd03453e943afce/370_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,049 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,049 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743539_2715, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_370.json
2018-07-21T05:32:55,056 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_370.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,075 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743540_2716, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09daa3a2cc644ecc811cd3a521922c8f/371_descriptor.json
2018-07-21T05:32:55,080 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09daa3a2cc644ecc811cd3a521922c8f/371_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,082 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743541_2717, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/371_index.zip
2018-07-21T05:32:55,088 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/09daa3a2cc644ecc811cd3a521922c8f/371_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,092 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,092 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743542_2718, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_371.json
2018-07-21T05:32:55,097 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_371.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,118 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,118 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743543_2719, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fc5de2507e324463a28bf2914ed15a84/372_descriptor.json
2018-07-21T05:32:55,123 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fc5de2507e324463a28bf2914ed15a84/372_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,126 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,126 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743544_2720, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/372_index.zip
2018-07-21T05:32:55,132 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fc5de2507e324463a28bf2914ed15a84/372_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,135 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,135 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743545_2721, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_372.json
2018-07-21T05:32:55,139 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_372.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,165 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,165 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743546_2722, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8f5f4a7cd6d4729af78184f94a08f69/373_descriptor.json
2018-07-21T05:32:55,169 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8f5f4a7cd6d4729af78184f94a08f69/373_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:55,171 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,171 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743547_2723, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/373_index.zip
2018-07-21T05:32:55,181 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8f5f4a7cd6d4729af78184f94a08f69/373_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,185 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,185 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743548_2724, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_373.json
2018-07-21T05:32:55,197 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_373.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,225 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,226 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743549_2725, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57107e03f3ae4887b447df161defed50/374_descriptor.json
2018-07-21T05:32:55,233 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57107e03f3ae4887b447df161defed50/374_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,235 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,235 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743550_2726, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/374_index.zip
2018-07-21T05:32:55,239 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57107e03f3ae4887b447df161defed50/374_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,242 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,242 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743551_2727, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_374.json
2018-07-21T05:32:55,247 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_374.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,264 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,264 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743552_2728, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f965cabdc2f4bc7a61a9703df3e0b51/375_descriptor.json
2018-07-21T05:32:55,268 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f965cabdc2f4bc7a61a9703df3e0b51/375_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,270 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,270 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743553_2729, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/375_index.zip
2018-07-21T05:32:55,274 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9f965cabdc2f4bc7a61a9703df3e0b51/375_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,276 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,277 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,277 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743554_2730, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_375.json
2018-07-21T05:32:55,282 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_375.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,303 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,304 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743555_2731, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/abfd96cfbeb84a729abb5a71ed029448/376_descriptor.json
2018-07-21T05:32:55,307 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/abfd96cfbeb84a729abb5a71ed029448/376_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,309 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,309 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743556_2732, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/376_index.zip
2018-07-21T05:32:55,313 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/abfd96cfbeb84a729abb5a71ed029448/376_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,316 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,316 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743557_2733, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_376.json
2018-07-21T05:32:55,320 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_376.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,336 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,336 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743558_2734, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d3641d06d3f141a7b17492de7a7df96c/377_descriptor.json
2018-07-21T05:32:55,340 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d3641d06d3f141a7b17492de7a7df96c/377_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,342 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,342 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743559_2735, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/377_index.zip
2018-07-21T05:32:55,346 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d3641d06d3f141a7b17492de7a7df96c/377_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,349 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,349 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743560_2736, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_377.json
2018-07-21T05:32:55,353 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_377.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,370 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,370 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743561_2737, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/759656631a5c4068a1a1d7067e48810f/378_descriptor.json
2018-07-21T05:32:55,386 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/759656631a5c4068a1a1d7067e48810f/378_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,387 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,387 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743562_2738, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/378_index.zip
2018-07-21T05:32:55,416 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/759656631a5c4068a1a1d7067e48810f/378_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,419 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,419 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743563_2739, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_378.json
2018-07-21T05:32:55,425 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_378.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,445 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,445 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743564_2740, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/25da17c3e37e4a5cb9ba671c8b791b7b/379_descriptor.json
2018-07-21T05:32:55,469 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/25da17c3e37e4a5cb9ba671c8b791b7b/379_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,471 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,471 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,471 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,471 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,471 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,471 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743565_2741, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/379_index.zip
2018-07-21T05:32:55,499 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/25da17c3e37e4a5cb9ba671c8b791b7b/379_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,507 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,507 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743566_2742, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_379.json
2018-07-21T05:32:55,536 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_379.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,552 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,552 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743567_2743, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cda1db83ccca4f7ab833c65c9e115658/380_descriptor.json
2018-07-21T05:32:55,558 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cda1db83ccca4f7ab833c65c9e115658/380_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,561 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,561 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743568_2744, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/380_index.zip
2018-07-21T05:32:55,569 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cda1db83ccca4f7ab833c65c9e115658/380_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,595 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,596 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,596 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743569_2745, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_380.json
2018-07-21T05:32:55,620 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_380.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,636 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,636 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743570_2746, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/30d3e832cbfe43149f33d898f317d2f8/381_descriptor.json
2018-07-21T05:32:55,640 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/30d3e832cbfe43149f33d898f317d2f8/381_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:55,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,642 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743571_2747, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/381_index.zip
2018-07-21T05:32:55,645 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/30d3e832cbfe43149f33d898f317d2f8/381_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,648 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,648 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743572_2748, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_381.json
2018-07-21T05:32:55,652 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_381.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,668 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,668 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,668 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,668 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,668 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,668 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,669 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:55,669 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,669 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,669 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:55,669 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,669 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743573_2749, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7908e93e5ce942b7b23f395cce43897a/382_descriptor.json
2018-07-21T05:32:55,673 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7908e93e5ce942b7b23f395cce43897a/382_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,674 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,674 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743574_2750, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/382_index.zip
2018-07-21T05:32:55,678 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7908e93e5ce942b7b23f395cce43897a/382_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,681 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,681 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743575_2751, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_382.json
2018-07-21T05:32:55,685 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_382.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:55,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,701 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743576_2752, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8313af0d42134a70b469bc243ced9d84/383_descriptor.json
2018-07-21T05:32:55,705 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8313af0d42134a70b469bc243ced9d84/383_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,707 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,707 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743577_2753, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/383_index.zip
2018-07-21T05:32:55,711 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8313af0d42134a70b469bc243ced9d84/383_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,714 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,714 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743578_2754, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_383.json
2018-07-21T05:32:55,717 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_383.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:55,762 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,762 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743579_2755, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4bba2fc1424484d9569bde2022747d0/384_descriptor.json
2018-07-21T05:32:55,785 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4bba2fc1424484d9569bde2022747d0/384_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
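Each "BLOCK* allocate ... for <path>" / "DIR* completeFile: <path> is closed by DFSClient_..." pair above is one client-side file write as the NameNode sees it: opening the output stream and writing data allocates a replica pipeline, and closing the stream completes the file. A minimal sketch of the client side that would emit such a pair, assuming a reachable test NameNode; the fs.defaultFS URI and target path below are placeholders, not values taken from this run.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class StagingWriteSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Placeholder NameNode URI; in the test harness this comes from
            // the generated hive-site.xml / core-site.xml instead.
            conf.set("fs.defaultFS", "hdfs://127.0.0.1:35925");
            FileSystem fs = FileSystem.get(conf);
            // Placeholder path standing in for a staged segment descriptor.
            Path descriptor = new Path("/tmp/druidStagingDir/example/0_descriptor.json");
            // Writing forces a "BLOCK* allocate" on the NameNode; closing the
            // stream (via try-with-resources) triggers "DIR* completeFile".
            try (FSDataOutputStream out = fs.create(descriptor, true)) {
                out.writeBytes("{}");
            }
            fs.close();
        }
    }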
2018-07-21T05:32:55,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:55,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:55,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,788 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743580_2756, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/384_index.zip
2018-07-21T05:32:55,802 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b4bba2fc1424484d9569bde2022747d0/384_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:55,805 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:55,806 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:55,806 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743581_2757, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_384.json
2018-07-21T05:32:56,212 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_384.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,232 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,232 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743582_2758, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fffa67dd93b64341a068ac3905f09989/385_descriptor.json
2018-07-21T05:32:56,237 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fffa67dd93b64341a068ac3905f09989/385_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,238 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,238 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,238 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,238 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,238 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,238 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,239 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743583_2759, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/385_index.zip
2018-07-21T05:32:56,242 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fffa67dd93b64341a068ac3905f09989/385_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,246 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,246 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743584_2760, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_385.json
2018-07-21T05:32:56,251 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_385.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,267 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,267 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,268 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,268 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743585_2761, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/730d69800845451a959b67835942e9a1/386_descriptor.json
2018-07-21T05:32:56,272 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/730d69800845451a959b67835942e9a1/386_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,274 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,274 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743586_2762, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/386_index.zip
2018-07-21T05:32:56,278 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/730d69800845451a959b67835942e9a1/386_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,281 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,281 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743587_2763, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_386.json
2018-07-21T05:32:56,285 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_386.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,301 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,301 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743588_2764, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/748e691d47ea417183478b128b742834/387_descriptor.json
2018-07-21T05:32:56,305 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/748e691d47ea417183478b128b742834/387_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,307 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,307 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743589_2765, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/387_index.zip
2018-07-21T05:32:56,311 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/748e691d47ea417183478b128b742834/387_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,313 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743590_2766, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_387.json
2018-07-21T05:32:56,317 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_387.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,334 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743591_2767, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ef77612433204312bc743c1f80288155/388_descriptor.json
2018-07-21T05:32:56,338 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ef77612433204312bc743c1f80288155/388_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,340 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,340 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743592_2768, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/388_index.zip
2018-07-21T05:32:56,344 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ef77612433204312bc743c1f80288155/388_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,347 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,348 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743593_2769, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_388.json
2018-07-21T05:32:56,352 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_388.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,367 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,367 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743594_2770, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/637abf4773c7435c889efeb3a5476030/389_descriptor.json
2018-07-21T05:32:56,372 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/637abf4773c7435c889efeb3a5476030/389_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,374 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743595_2771, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/389_index.zip
2018-07-21T05:32:56,386 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/637abf4773c7435c889efeb3a5476030/389_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,391 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,391 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743596_2772, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_389.json
2018-07-21T05:32:56,397 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_389.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,415 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,415 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743597_2773, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3148d733b6bb40bbb9a3918b618d0c82/390_descriptor.json
2018-07-21T05:32:56,420 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3148d733b6bb40bbb9a3918b618d0c82/390_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,422 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,422 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743598_2774, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/390_index.zip
2018-07-21T05:32:56,428 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3148d733b6bb40bbb9a3918b618d0c82/390_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,431 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,431 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743599_2775, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_390.json
2018-07-21T05:32:56,435 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_390.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,455 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,455 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743600_2776, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f495744495eb42379578eb1e45dce534/391_descriptor.json
2018-07-21T05:32:56,461 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f495744495eb42379578eb1e45dce534/391_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,463 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,464 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743601_2777, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/391_index.zip
2018-07-21T05:32:56,469 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f495744495eb42379578eb1e45dce534/391_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,472 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,473 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,473 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743602_2778, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_391.json
2018-07-21T05:32:56,480 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_391.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,503 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,503 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743603_2779, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a3ed07ee05224c88ba4c125cbbec9074/392_descriptor.json
2018-07-21T05:32:56,508 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a3ed07ee05224c88ba4c125cbbec9074/392_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,510 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,510 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743604_2780, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/392_index.zip
2018-07-21T05:32:56,515 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a3ed07ee05224c88ba4c125cbbec9074/392_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,518 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,518 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743605_2781, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_392.json
2018-07-21T05:32:56,522 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_392.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,538 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,539 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743606_2782, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eba209f97e914ab2837181ae68b86670/393_descriptor.json
2018-07-21T05:32:56,543 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eba209f97e914ab2837181ae68b86670/393_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,545 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,545 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,545 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,546 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,546 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,546 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,546 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:56,546 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,546 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743607_2783, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/393_index.zip
2018-07-21T05:32:56,550 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/eba209f97e914ab2837181ae68b86670/393_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,554 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,554 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743608_2784, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_393.json
2018-07-21T05:32:56,558 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_393.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,577 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,578 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,578 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,578 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,578 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,578 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743609_2785, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/26a52e8cf8b94847b916ee0b97c52489/394_descriptor.json
2018-07-21T05:32:56,582 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/26a52e8cf8b94847b916ee0b97c52489/394_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,583 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,583 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,584 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,584 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,584 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,584 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,584 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:56,584 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,584 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743610_2786, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/394_index.zip
2018-07-21T05:32:56,589 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/26a52e8cf8b94847b916ee0b97c52489/394_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,593 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,593 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743611_2787, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_394.json
2018-07-21T05:32:56,598 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_394.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,613 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,613 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743612_2788, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f31d7dc49ed44db6aa8ecd94fa185edb/395_descriptor.json
2018-07-21T05:32:56,617 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f31d7dc49ed44db6aa8ecd94fa185edb/395_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,619 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,619 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743613_2789, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/395_index.zip
2018-07-21T05:32:56,626 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f31d7dc49ed44db6aa8ecd94fa185edb/395_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,629 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,629 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743614_2790, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_395.json
2018-07-21T05:32:56,641 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_395.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,659 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,660 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743615_2791, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7748a8d3cefa4097bcd65186408f6674/396_descriptor.json
2018-07-21T05:32:56,665 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7748a8d3cefa4097bcd65186408f6674/396_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,667 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,668 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,668 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743616_2792, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/396_index.zip
2018-07-21T05:32:56,678 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7748a8d3cefa4097bcd65186408f6674/396_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:56,681 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,681 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743617_2793, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_396.json
2018-07-21T05:32:56,688 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_396.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,711 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,712 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,712 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,712 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:56,712 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:56,712 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:56,712 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743618_2794, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5e8468452292475e8781d24e9d6c5616/397_descriptor.json 2018-07-21T05:32:56,717 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5e8468452292475e8781d24e9d6c5616/397_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:56,719 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:56,719 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743619_2795, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/397_index.zip 2018-07-21T05:32:56,724 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5e8468452292475e8781d24e9d6c5616/397_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:56,727 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:56,727 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743620_2796, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_397.json 2018-07-21T05:32:56,732 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_397.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:56,753 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:56,754 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743621_2797, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/75b818e313df4b369983d72f9e65a988/398_descriptor.json 2018-07-21T05:32:56,758 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/75b818e313df4b369983d72f9e65a988/398_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
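The repeated DEBUG cycle above ("Choosing random from N available nodes ... excludeNodes=[...]", "Node X is excluded, continuing.", "chooseRandom returning Y") is the NameNode's replica placement walking the single /default-rack topology: for each of a block's three replicas it draws a random datanode and re-draws while the pick is already excluded for that block. A minimal self-contained Java sketch of that retry-with-exclusion pattern (an illustration only, not Hadoop's actual NetworkTopology code; the node list and log wording are copied from this run):

import java.util.*;

/** Illustrative re-creation of the chooseRandom retry pattern seen in this log. */
public class ChooseRandomSketch {
  private static final Random RAND = new Random();

  /** Draws uniformly at random, re-drawing while the pick is excluded. */
  static String chooseRandom(List<String> nodes, Set<String> excludeNodes) {
    long available = nodes.stream().filter(n -> !excludeNodes.contains(n)).count();
    System.out.println("Choosing random from " + available
        + " available nodes, excludeNodes=" + excludeNodes);
    while (true) {
      String candidate = nodes.get(RAND.nextInt(nodes.size()));
      if (excludeNodes.contains(candidate)) {
        // Matches the repeated "Node X is excluded, continuing." DEBUG lines.
        System.out.println("Node " + candidate + " is excluded, continuing.");
        continue;
      }
      System.out.println("chooseRandom returning " + candidate);
      return candidate;
    }
  }

  public static void main(String[] args) {
    // The four datanode addresses visible in this log.
    List<String> rack = List.of("127.0.0.1:40780", "127.0.0.1:52570",
                                "127.0.0.1:33099", "127.0.0.1:45625");
    Set<String> chosen = new LinkedHashSet<>();
    while (chosen.size() < 3) {          // one pick per replica of a block
      chosen.add(chooseRandom(rack, chosen));
    }
    System.out.println("replicas=" + chosen);
  }
}

Run against a four-node list, this reproduces the same shape of output: "from 3 available" with one exclusion, then "from 2 available" with two, with occasional repeated "is excluded, continuing." lines when the random draw lands on an excluded node more than once.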
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,760 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,760 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743622_2798, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/398_index.zip
2018-07-21T05:32:56,765 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/75b818e313df4b369983d72f9e65a988/398_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,768 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,768 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743623_2799, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_398.json
2018-07-21T05:32:56,774 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_398.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,796 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,796 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,797 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,797 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743624_2800, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d9f2d7c7a0014a2cbb57b676e1b29c92/399_descriptor.json
2018-07-21T05:32:56,802 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d9f2d7c7a0014a2cbb57b676e1b29c92/399_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,803 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,804 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,804 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743625_2801, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/399_index.zip
2018-07-21T05:32:56,809 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d9f2d7c7a0014a2cbb57b676e1b29c92/399_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,813 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,813 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743626_2802, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_399.json
2018-07-21T05:32:56,819 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_399.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,843 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,843 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743627_2803, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5df56cd138c46e4b112465f7430fbef/400_descriptor.json
2018-07-21T05:32:56,850 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5df56cd138c46e4b112465f7430fbef/400_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,852 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,852 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743628_2804, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/400_index.zip
2018-07-21T05:32:56,856 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f5df56cd138c46e4b112465f7430fbef/400_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,860 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,860 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743629_2805, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_400.json
2018-07-21T05:32:56,864 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_400.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:56,881 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,881 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743630_2806, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6110b8ad4bd1482ba4914f78dca62e8c/401_descriptor.json
2018-07-21T05:32:56,886 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6110b8ad4bd1482ba4914f78dca62e8c/401_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,888 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,889 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743631_2807, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/401_index.zip
2018-07-21T05:32:56,893 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6110b8ad4bd1482ba4914f78dca62e8c/401_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,896 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,896 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743632_2808, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_401.json
2018-07-21T05:32:56,907 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_401.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,925 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,926 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:56,926 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743633_2809, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a5ce795c57eb44808fc31b24cda958fb/402_descriptor.json
2018-07-21T05:32:56,930 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a5ce795c57eb44808fc31b24cda958fb/402_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
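A pattern worth noting in the paths: each numbered segment in this run (396 through 406) leaves behind the same trio of staging files, namely a per-task <hash>/NNN_descriptor.json and an interval-and-version-keyed NNN_index.zip under intermediateSegmentDir, plus a final per-segment descriptor under segmentsDescriptorDir. Consequently the block IDs advance by exactly three per segment (2791 for 396_descriptor.json through 2821 for 406_descriptor.json). A small Java sketch of that layout as plain path construction (the helper class and method names are hypothetical, inferred only from the paths logged above; the staging root in main is a placeholder):

import java.nio.file.Path;

/** Hypothetical helper mirroring the Druid staging layout seen in this log. */
public class DruidStagingLayout {
  // Constants copied from the log; only the per-task hash varies per segment.
  static final String TABLE = "default.druid_max_size_partition";
  static final String INTERVAL = "19700101T000000.000Z_19700101T010000.000Z";
  static final String VERSION = "2018-07-21T05_31_59.547-07_00";

  static Path descriptor(Path staging, String taskHash, int n) {
    return staging.resolve("intermediateSegmentDir").resolve(TABLE)
        .resolve(taskHash).resolve(n + "_descriptor.json");
  }

  static Path indexZip(Path staging, int n) {
    return staging.resolve("intermediateSegmentDir").resolve(TABLE)
        .resolve(INTERVAL).resolve(VERSION).resolve(n + "_index.zip");
  }

  static Path segmentDescriptor(Path staging, int n) {
    return staging.resolve("segmentsDescriptorDir").resolve(TABLE
        + "_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_"
        + n + ".json");
  }

  public static void main(String[] args) {
    Path staging = Path.of("/tmp/druidStagingDir/.staging-demo"); // placeholder root
    System.out.println(descriptor(staging, "a5ce795c57eb44808fc31b24cda958fb", 402));
    System.out.println(indexZip(staging, 402));
    System.out.println(segmentDescriptor(staging, 402));
  }
}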
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,932 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,932 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743634_2810, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/402_index.zip
2018-07-21T05:32:56,937 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a5ce795c57eb44808fc31b24cda958fb/402_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,940 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,940 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,940 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,940 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,940 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,941 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,941 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:56,941 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,941 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743635_2811, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_402.json
2018-07-21T05:32:56,945 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_402.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,962 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,962 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743636_2812, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b79fc20a6c094d7596c707d9e30cc8ca/403_descriptor.json
2018-07-21T05:32:56,967 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b79fc20a6c094d7596c707d9e30cc8ca/403_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:56,968 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,968 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743637_2813, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/403_index.zip
2018-07-21T05:32:56,972 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b79fc20a6c094d7596c707d9e30cc8ca/403_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:56,975 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:56,975 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743638_2814, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_403.json
2018-07-21T05:32:56,981 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_403.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,000 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,000 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743639_2815, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e6c94391bee432e9e6ca1886f1e6f98/404_descriptor.json
2018-07-21T05:32:57,004 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e6c94391bee432e9e6ca1886f1e6f98/404_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:57,006 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,006 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743640_2816, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/404_index.zip
2018-07-21T05:32:57,010 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7e6c94391bee432e9e6ca1886f1e6f98/404_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:57,013 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,013 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743641_2817, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_404.json
2018-07-21T05:32:57,017 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_404.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
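Every "BLOCK* allocate ... / DIR* completeFile ..." pair above is one client-side write: the reducer's DFSClient asks the NameNode for a block (receiving the three replica locations just chosen), streams the data to the pipeline, and the NameNode logs completeFile when the client closes its output stream. The same allocate/completeFile sequence can be triggered against any HDFS with the stock client API; the file path below is a placeholder, not one from this run:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CompleteFileDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();       // reads core-site.xml / hdfs-site.xml
    FileSystem fs = FileSystem.get(conf);
    Path p = new Path("/tmp/demo_descriptor.json"); // placeholder path
    try (FSDataOutputStream out = fs.create(p)) {   // NameNode logs: BLOCK* allocate ...
      out.writeBytes("{}");                         // bytes go to the chosen replicas
    }                                               // close() -> NameNode logs: DIR* completeFile
    fs.close();
  }
}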
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,049 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,049 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743642_2818, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3a36d54d8ff444a795cee2761d6e2d3a/405_descriptor.json
2018-07-21T05:32:57,055 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3a36d54d8ff444a795cee2761d6e2d3a/405_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,061 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,061 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743643_2819, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/405_index.zip
2018-07-21T05:32:57,070 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3a36d54d8ff444a795cee2761d6e2d3a/405_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:57,074 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,074 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743644_2820, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_405.json
2018-07-21T05:32:57,084 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_405.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,104 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,105 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,105 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743645_2821, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6175a1b8cf7d43a2ab0bd63f5ffbe36e/406_descriptor.json
2018-07-21T05:32:57,110 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6175a1b8cf7d43a2ab0bd63f5ffbe36e/406_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,112 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,112 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743646_2822, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/406_index.zip
2018-07-21T05:32:57,119 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6175a1b8cf7d43a2ab0bd63f5ffbe36e/406_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,123 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,123 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743647_2823, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_406.json
2018-07-21T05:32:57,145 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_406.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,169 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,169 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743648_2824, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cb7079b56e844251a30b31aa374c3901/407_descriptor.json
2018-07-21T05:32:57,175 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cb7079b56e844251a30b31aa374c3901/407_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,177 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,177 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743649_2825, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/407_index.zip
2018-07-21T05:32:57,190 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/cb7079b56e844251a30b31aa374c3901/407_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:57,194 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,194 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743650_2826, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_407.json
2018-07-21T05:32:57,203 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_407.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,229 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,229 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743651_2827, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/10205faf4ec940e39050ffd70cf71601/408_descriptor.json
2018-07-21T05:32:57,238 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/10205faf4ec940e39050ffd70cf71601/408_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,240 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:57,241 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,241 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743652_2828, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/408_index.zip
2018-07-21T05:32:57,250 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/10205faf4ec940e39050ffd70cf71601/408_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,253 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,253 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,254 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,254 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,254 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,254 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,254 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,254 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,254 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743653_2829, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_408.json
2018-07-21T05:32:57,259 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_408.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,276 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,276 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743654_2830, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ec6c9158908743038d69c14308122bed/409_descriptor.json
2018-07-21T05:32:57,280 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ec6c9158908743038d69c14308122bed/409_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,282 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,282 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743655_2831, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/409_index.zip
2018-07-21T05:32:57,288 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ec6c9158908743038d69c14308122bed/409_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,291 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,291 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,291 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,291 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,292 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,292 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,292 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,292 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,292 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743656_2832, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_409.json
2018-07-21T05:32:57,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:32:57,696 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_409.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
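Every staged descriptor and index file in this window leaves the same two-record trace: a BLOCK* allocate blk_..., replicas=... for <path> record when the reducer's DFSClient opens the file, then a DIR* completeFile: <path> is closed by DFSClient_... record a few milliseconds later. A hedged scanning sketch that pairs the two by path (the regexes are assumptions fitted to these exact message formats; note that the *_index.zip allocations above carry a different path in their allocate and completeFile records, so a purely path-keyed pairing like this one will report those as unclosed):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative scanner pairing "BLOCK* allocate ... for <path>" records
// with later "DIR* completeFile: <path> is closed by ..." records.
public class AllocateCompletePairs {
    private static final Pattern ALLOCATE =
        Pattern.compile("BLOCK\\* allocate (blk_\\S+), replicas=(.+?) for (\\S+)");
    private static final Pattern COMPLETE =
        Pattern.compile("DIR\\* completeFile: (\\S+) is closed by (\\S+)");

    public static void main(String[] args) throws IOException {
        Map<String, String> openBlocks = new HashMap<>(); // path -> block id
        for (String line : Files.readAllLines(Paths.get(args[0]))) {
            Matcher a = ALLOCATE.matcher(line);
            if (a.find()) {
                openBlocks.put(a.group(3), a.group(1));
            }
            Matcher c = COMPLETE.matcher(line);
            if (c.find()) {
                String blk = openBlocks.remove(c.group(1));
                if (blk != null) {
                    System.out.println(blk + " -> " + c.group(1));
                }
            }
        }
        // Anything left was allocated under a path that never saw a
        // completeFile in this log window (e.g. the renamed index.zip files).
        openBlocks.forEach((path, blk) ->
            System.out.println("UNCLOSED " + blk + " " + path));
    }
}

The single monitoring.RenderStrategy$LogToFileFunction record in the middle of this window is the Hive query's own progress line: 1 of 1 map tasks done, 0 done and 1 running of 1 reducer, which is the reducer emitting all of these segment files.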
2018-07-21T05:32:57,713 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:57,713 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743657_2833, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6024bf335b0f4401bf725110ef65978b/410_descriptor.json 2018-07-21T05:32:57,718 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6024bf335b0f4401bf725110ef65978b/410_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:57,719 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:57,719 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743658_2834, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/410_index.zip 2018-07-21T05:32:57,723 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/6024bf335b0f4401bf725110ef65978b/410_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:57,726 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:57,726 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743659_2835, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_410.json 2018-07-21T05:32:57,732 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_410.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:57,749 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:57,749 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743660_2836, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57ef93a9f69447f1836ea23ef52fdb87/411_descriptor.json 2018-07-21T05:32:57,757 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57ef93a9f69447f1836ea23ef52fdb87/411_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:57,759 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:57,759 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743661_2837, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/411_index.zip 2018-07-21T05:32:57,764 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/57ef93a9f69447f1836ea23ef52fdb87/411_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,767 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:57,767 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:57,767 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,767 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,767 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:57,767 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:57,767 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743662_2838, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_411.json 2018-07-21T05:32:57,770 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_411.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:57,787 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:57,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:57,788 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:57,788 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743663_2839, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/558439befb604f0b9aac762f29ad83e6/412_descriptor.json 2018-07-21T05:32:57,792 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/558439befb604f0b9aac762f29ad83e6/412_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:57,793 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:57,794 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:57,794 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743664_2840, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/412_index.zip 2018-07-21T05:32:57,797 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/558439befb604f0b9aac762f29ad83e6/412_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:32:57,800 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:57,800 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743665_2841, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_412.json 2018-07-21T05:32:57,809 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_412.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:57,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:57,826 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743666_2842, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49993634202140a68dc641dabe52b02b/413_descriptor.json 2018-07-21T05:32:57,830 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49993634202140a68dc641dabe52b02b/413_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,831 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:57,832 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:57,832 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:57,832 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743667_2843, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/413_index.zip 2018-07-21T05:32:57,835 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/49993634202140a68dc641dabe52b02b/413_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,838 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743668_2844, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_413.json
2018-07-21T05:32:57,842 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_413.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,859 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,859 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743669_2845, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ff6982b830748c88caf205934127504/414_descriptor.json
2018-07-21T05:32:57,864 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ff6982b830748c88caf205934127504/414_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,866 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,866 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743670_2846, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/414_index.zip
2018-07-21T05:32:57,870 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1ff6982b830748c88caf205934127504/414_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,873 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,873 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,874 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,874 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743671_2847, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_414.json
2018-07-21T05:32:57,878 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_414.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,902 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,902 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743672_2848, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2444e75beec8453d91c6ca5e374f196f/415_descriptor.json
2018-07-21T05:32:57,906 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2444e75beec8453d91c6ca5e374f196f/415_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,908 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,908 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743673_2849, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/415_index.zip
2018-07-21T05:32:57,913 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/2444e75beec8453d91c6ca5e374f196f/415_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
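Each Druid segment the reducer finishes (412, 413, 414, 415, ... above) produces the same three HDFS writes in order: an N_descriptor.json under intermediateSegmentDir, an N_index.zip under the interval directory, and a published descriptor under segmentsDescriptorDir. Every create/close pair shows up on the NameNode as a BLOCK* allocate followed by a DIR* completeFile. The schematic below uses the stock org.apache.hadoop.fs API to mirror that sequence; the class name, helper name, and simplified path layout are illustrative stand-ins, not the storage handler's actual code.

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SegmentPushSketch {
    // Write the three per-segment files in the order the log shows. Writing
    // data drives a NameNode BLOCK* allocate, and the close() at the end of
    // each try-with-resources appears as DIR* completeFile.
    static void pushSegment(FileSystem fs, Path stagingDir, int segmentNum,
                            byte[] descriptorJson, byte[] indexZip) throws Exception {
        Path descriptor = new Path(stagingDir,
                "intermediateSegmentDir/" + segmentNum + "_descriptor.json");
        Path index = new Path(stagingDir,
                "intermediateSegmentDir/" + segmentNum + "_index.zip");
        Path published = new Path(stagingDir,
                "segmentsDescriptorDir/segment_" + segmentNum + ".json");

        for (Path p : new Path[] { descriptor, index, published }) {
            byte[] payload = p.equals(index) ? indexZip : descriptorJson;
            try (FSDataOutputStream out = fs.create(p)) { // -> BLOCK* allocate
                out.write(payload);
            } // -> DIR* completeFile
        }
    }
}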
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,916 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,917 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,917 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743674_2850, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_415.json
2018-07-21T05:32:57,921 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_415.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,940 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,940 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743675_2851, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/89e3ef2d49a74076ba075f0eae9de73e/416_descriptor.json
2018-07-21T05:32:57,944 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/89e3ef2d49a74076ba075f0eae9de73e/416_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:57,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,946 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743676_2852, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/416_index.zip
2018-07-21T05:32:57,950 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/89e3ef2d49a74076ba075f0eae9de73e/416_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:57,953 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743677_2853, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_416.json
2018-07-21T05:32:57,957 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_416.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,975 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,975 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:57,976 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,976 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743678_2854, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a4ff3cc5f76c4d2785f155400fc527dc/417_descriptor.json
2018-07-21T05:32:57,980 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a4ff3cc5f76c4d2785f155400fc527dc/417_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:57,982 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:57,982 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743679_2855, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/417_index.zip
2018-07-21T05:32:57,985 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a4ff3cc5f76c4d2785f155400fc527dc/417_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:57,988 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:57,988 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743680_2856, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_417.json
2018-07-21T05:32:57,992 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_417.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,010 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,010 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743681_2857, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dbbc27931e05468b98a4de9d87ce76ae/418_descriptor.json
2018-07-21T05:32:58,017 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dbbc27931e05468b98a4de9d87ce76ae/418_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,019 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,019 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743682_2858, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/418_index.zip
2018-07-21T05:32:58,023 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dbbc27931e05468b98a4de9d87ce76ae/418_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,026 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,026 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743683_2859, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_418.json
2018-07-21T05:32:58,029 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_418.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,046 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,046 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743684_2860, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/729c4d0ae87b4d09b07d595aad9c9bd3/419_descriptor.json
2018-07-21T05:32:58,049 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/729c4d0ae87b4d09b07d595aad9c9bd3/419_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,051 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,051 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743685_2861, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/419_index.zip
2018-07-21T05:32:58,055 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/729c4d0ae87b4d09b07d595aad9c9bd3/419_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,058 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743686_2862, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_419.json
2018-07-21T05:32:58,061 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_419.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,081 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,082 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743687_2863, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/97293a612da84291a62fc12bd5270701/420_descriptor.json
2018-07-21T05:32:58,086 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/97293a612da84291a62fc12bd5270701/420_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,088 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,088 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743688_2864, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/420_index.zip
2018-07-21T05:32:58,092 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/97293a612da84291a62fc12bd5270701/420_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,095 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,095 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743689_2865, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_420.json
2018-07-21T05:32:58,098 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_420.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:58,115 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,115 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743690_2866, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9ae14fd7ef5b4399bb40d8104948d397/421_descriptor.json
2018-07-21T05:32:58,119 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9ae14fd7ef5b4399bb40d8104948d397/421_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,120 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:58,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,121 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743691_2867, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/421_index.zip
2018-07-21T05:32:58,129 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9ae14fd7ef5b4399bb40d8104948d397/421_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,132 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,132 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743692_2868, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_421.json
2018-07-21T05:32:58,136 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_421.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,153 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,153 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743693_2869, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c4c2ede38bcc49538a7ac4fae2b4e62e/422_descriptor.json
2018-07-21T05:32:58,158 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c4c2ede38bcc49538a7ac4fae2b4e62e/422_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:58,160 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,160 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743694_2870, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/422_index.zip 2018-07-21T05:32:58,164 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c4c2ede38bcc49538a7ac4fae2b4e62e/422_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,167 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,167 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743695_2871, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_422.json 2018-07-21T05:32:58,171 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_422.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:58,192 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,192 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743696_2872, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff23b7d1e23844b38bfce0e2789227a5/423_descriptor.json 2018-07-21T05:32:58,196 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff23b7d1e23844b38bfce0e2789227a5/423_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,197 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:58,197 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,198 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,198 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743697_2873, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/423_index.zip 2018-07-21T05:32:58,201 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff23b7d1e23844b38bfce0e2789227a5/423_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:58,204 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,204 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743698_2874, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_423.json 2018-07-21T05:32:58,208 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_423.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,226 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,226 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743699_2875, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9ba151e6dbca4921af4a11168e5d2f0b/424_descriptor.json 2018-07-21T05:32:58,230 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9ba151e6dbca4921af4a11168e5d2f0b/424_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,232 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:58,233 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,233 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743700_2876, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/424_index.zip 2018-07-21T05:32:58,236 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9ba151e6dbca4921af4a11168e5d2f0b/424_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:58,239 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,239 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743701_2877, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_424.json 2018-07-21T05:32:58,243 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_424.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:58,260 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,260 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743702_2878, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7a3888ff25694f8696fbad02962a0a1f/425_descriptor.json 2018-07-21T05:32:58,273 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7a3888ff25694f8696fbad02962a0a1f/425_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:32:58,275 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,275 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743703_2879, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/425_index.zip 2018-07-21T05:32:58,279 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7a3888ff25694f8696fbad02962a0a1f/425_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:58,282 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,282 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743704_2880, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_425.json 2018-07-21T05:32:58,286 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_425.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:58,303 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,303 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743705_2881, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db1756e335c24564bc3a516d99b28260/426_descriptor.json 2018-07-21T05:32:58,311 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db1756e335c24564bc3a516d99b28260/426_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:32:58,313 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,313 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743706_2882, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/426_index.zip 2018-07-21T05:32:58,317 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/db1756e335c24564bc3a516d99b28260/426_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:58,320 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,320 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743707_2883, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_426.json 2018-07-21T05:32:58,324 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_426.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,341 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:58,341 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,342 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,342 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,342 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,342 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,342 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,342 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,342 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743708_2884, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f3cf587b10df45b9b8d751c92f08fce0/427_descriptor.json 2018-07-21T05:32:58,346 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f3cf587b10df45b9b8d751c92f08fce0/427_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:58,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,348 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743709_2885, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/427_index.zip 2018-07-21T05:32:58,353 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f3cf587b10df45b9b8d751c92f08fce0/427_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:58,356 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,356 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743710_2886, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_427.json 2018-07-21T05:32:58,360 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_427.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:58,379 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,379 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743711_2887, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/396c85e1b18e4b1f94df5661597ad222/428_descriptor.json 2018-07-21T05:32:58,385 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/396c85e1b18e4b1f94df5661597ad222/428_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:58,387 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,387 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743712_2888, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/428_index.zip 2018-07-21T05:32:58,391 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/396c85e1b18e4b1f94df5661597ad222/428_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,394 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,394 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743713_2889, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_428.json 2018-07-21T05:32:58,400 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_428.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,417 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,417 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:58,418 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,418 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743714_2890, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e6850432a694453812d6cfac7f9adb9/429_descriptor.json 2018-07-21T05:32:58,422 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e6850432a694453812d6cfac7f9adb9/429_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:58,424 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,424 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743715_2891, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/429_index.zip 2018-07-21T05:32:58,428 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9e6850432a694453812d6cfac7f9adb9/429_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:32:58,431 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,431 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743716_2892, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_429.json 2018-07-21T05:32:58,436 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_429.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:58,461 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:58,461 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743717_2893, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/859c716bb0a342cdb7eaf6a6b7ac7634/430_descriptor.json 2018-07-21T05:32:58,468 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/859c716bb0a342cdb7eaf6a6b7ac7634/430_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:58,470 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:58,470 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743718_2894, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/430_index.zip 2018-07-21T05:32:58,477 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/859c716bb0a342cdb7eaf6a6b7ac7634/430_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,482 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,482 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743719_2895, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_430.json
2018-07-21T05:32:58,488 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_430.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,516 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,516 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:58,517 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,517 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743720_2896, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fc0db78a1dc84b739db9c27791f3ad79/431_descriptor.json
2018-07-21T05:32:58,525 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fc0db78a1dc84b739db9c27791f3ad79/431_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,528 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,528 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743721_2897, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/431_index.zip
2018-07-21T05:32:58,533 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/fc0db78a1dc84b739db9c27791f3ad79/431_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,537 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,537 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743722_2898, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_431.json
2018-07-21T05:32:58,545 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_431.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,567 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,567 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743723_2899, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bef83abce8124a1bb5954167ac9ac2f0/432_descriptor.json
2018-07-21T05:32:58,572 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bef83abce8124a1bb5954167ac9ac2f0/432_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,576 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,577 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743724_2900, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/432_index.zip
2018-07-21T05:32:58,585 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bef83abce8124a1bb5954167ac9ac2f0/432_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,588 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,588 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743725_2901, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_432.json
2018-07-21T05:32:58,593 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_432.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,614 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,614 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743726_2902, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d564492ab74341418b0b82dd4db37aba/433_descriptor.json
2018-07-21T05:32:58,619 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d564492ab74341418b0b82dd4db37aba/433_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,621 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,621 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743727_2903, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/433_index.zip
2018-07-21T05:32:58,626 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d564492ab74341418b0b82dd4db37aba/433_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,630 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,630 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743728_2904, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_433.json
2018-07-21T05:32:58,634 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_433.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,655 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,655 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743729_2905, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f570d4441bf64b6492618f1a237cff68/434_descriptor.json
2018-07-21T05:32:58,661 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f570d4441bf64b6492618f1a237cff68/434_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,663 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,663 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743730_2906, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/434_index.zip
2018-07-21T05:32:58,668 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/f570d4441bf64b6492618f1a237cff68/434_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,671 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,671 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743731_2907, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_434.json
2018-07-21T05:32:58,676 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_434.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,698 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,698 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743732_2908, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddfcbbbf6c294a7fac37d21714011b82/435_descriptor.json
2018-07-21T05:32:58,702 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddfcbbbf6c294a7fac37d21714011b82/435_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,704 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,704 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743733_2909, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/435_index.zip
2018-07-21T05:32:58,709 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddfcbbbf6c294a7fac37d21714011b82/435_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,712 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,712 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743734_2910, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_435.json
2018-07-21T05:32:58,717 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_435.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,739 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,739 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743735_2911, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff0e50a1464e4ff2a335c339a93c8f5c/436_descriptor.json
2018-07-21T05:32:58,744 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff0e50a1464e4ff2a335c339a93c8f5c/436_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,746 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,746 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743736_2912, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/436_index.zip
2018-07-21T05:32:58,751 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ff0e50a1464e4ff2a335c339a93c8f5c/436_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,755 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,755 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743737_2913, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_436.json
2018-07-21T05:32:58,761 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_436.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,783 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,783 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743738_2914, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de5342ad9ef84e35ba66163da25b43e3/437_descriptor.json
2018-07-21T05:32:58,788 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de5342ad9ef84e35ba66163da25b43e3/437_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,790 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,791 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,791 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743739_2915, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/437_index.zip
2018-07-21T05:32:58,796 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/de5342ad9ef84e35ba66163da25b43e3/437_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,799 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,799 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743740_2916, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_437.json
2018-07-21T05:32:58,804 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_437.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,831 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,831 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743741_2917, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a7530b6021d6436597aa8a6ec270c993/438_descriptor.json
2018-07-21T05:32:58,837 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a7530b6021d6436597aa8a6ec270c993/438_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,839 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,839 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743742_2918, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/438_index.zip
2018-07-21T05:32:58,843 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a7530b6021d6436597aa8a6ec270c993/438_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,847 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,847 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743743_2919, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_438.json
2018-07-21T05:32:58,852 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_438.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,871 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,871 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743744_2920, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/29abd0e34aa34e1689630d46c4cbf1c0/439_descriptor.json
2018-07-21T05:32:58,878 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/29abd0e34aa34e1689630d46c4cbf1c0/439_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,880 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,880 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743745_2921, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/439_index.zip
2018-07-21T05:32:58,884 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/29abd0e34aa34e1689630d46c4cbf1c0/439_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:58,887 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,888 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743746_2922, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_439.json
2018-07-21T05:32:58,894 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_439.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,915 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,915 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743747_2923, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ac528b10ea8141d3ab463ab1c2bf6659/440_descriptor.json
2018-07-21T05:32:58,919 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ac528b10ea8141d3ab463ab1c2bf6659/440_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:58,922 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,922 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743748_2924, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/440_index.zip
2018-07-21T05:32:58,927 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ac528b10ea8141d3ab463ab1c2bf6659/440_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,930 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,931 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743749_2925, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_440.json
2018-07-21T05:32:58,935 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_440.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:58,956 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,956 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743750_2926, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5adfd36badaf47f7a9e93ab2c7d62823/441_descriptor.json
2018-07-21T05:32:58,961 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5adfd36badaf47f7a9e93ab2c7d62823/441_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,962 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,962 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:58,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:58,963 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,963 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743751_2927, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/441_index.zip
2018-07-21T05:32:58,967 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5adfd36badaf47f7a9e93ab2c7d62823/441_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:58,970 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,970 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743752_2928, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_441.json
2018-07-21T05:32:58,974 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_441.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:58,992 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:58,993 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743753_2929, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9cbd7b72a01540b89d47f5c5da6a1dba/442_descriptor.json
2018-07-21T05:32:58,997 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9cbd7b72a01540b89d47f5c5da6a1dba/442_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:58,998 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:58,999 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743754_2930, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/442_index.zip
2018-07-21T05:32:59,002 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9cbd7b72a01540b89d47f5c5da6a1dba/442_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:59,005 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,005 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743755_2931, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_442.json
2018-07-21T05:32:59,009 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_442.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,036 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,036 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743756_2932, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ab32f8bc80694ca0a797d5237d0debc8/443_descriptor.json
2018-07-21T05:32:59,041 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ab32f8bc80694ca0a797d5237d0debc8/443_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,042 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,042 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743757_2933, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/443_index.zip
2018-07-21T05:32:59,046 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ab32f8bc80694ca0a797d5237d0debc8/443_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,049 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,049 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,049 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,049 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,050 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,050 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743758_2934, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_443.json
2018-07-21T05:32:59,054 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_443.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,075 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,076 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743759_2935, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b41f463ebab4f668fa13e509fa74598/444_descriptor.json
2018-07-21T05:32:59,083 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b41f463ebab4f668fa13e509fa74598/444_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,094 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,094 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743760_2936, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/444_index.zip
2018-07-21T05:32:59,099 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4b41f463ebab4f668fa13e509fa74598/444_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,103 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,103 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,103 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,103 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,103 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,103 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,104 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,104 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,104 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,104 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,104 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,104 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,104 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743761_2937, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_444.json
2018-07-21T05:32:59,108 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_444.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,129 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,129 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743762_2938, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5897580b01a54fcd9523f4ed6bb3b403/445_descriptor.json
2018-07-21T05:32:59,135 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5897580b01a54fcd9523f4ed6bb3b403/445_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,136 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,137 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,137 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743763_2939, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/445_index.zip
2018-07-21T05:32:59,143 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5897580b01a54fcd9523f4ed6bb3b403/445_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,146 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,146 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743764_2940, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_445.json
2018-07-21T05:32:59,151 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_445.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,180 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,180 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743765_2941, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3fe077ef86904dcc936de5a3711b2e39/446_descriptor.json
2018-07-21T05:32:59,184 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3fe077ef86904dcc936de5a3711b2e39/446_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,187 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743766_2942, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/446_index.zip
2018-07-21T05:32:59,190 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/3fe077ef86904dcc936de5a3711b2e39/446_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,194 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743767_2943, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_446.json
2018-07-21T05:32:59,198 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_446.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,219 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,219 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743768_2944, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9267dc27ce66480cab8ecbb8ec35ce3a/447_descriptor.json
2018-07-21T05:32:59,223 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9267dc27ce66480cab8ecbb8ec35ce3a/447_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,225 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,225 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743769_2945, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/447_index.zip
2018-07-21T05:32:59,230 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9267dc27ce66480cab8ecbb8ec35ce3a/447_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,233 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743770_2946, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_447.json
2018-07-21T05:32:59,238 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_447.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,258 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,258 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743771_2947, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e2b33cff54c54df693b0605734bae1f1/448_descriptor.json
2018-07-21T05:32:59,263 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e2b33cff54c54df693b0605734bae1f1/448_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,265 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,265 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743772_2948, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/448_index.zip
2018-07-21T05:32:59,670 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e2b33cff54c54df693b0605734bae1f1/448_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,673 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,674 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743773_2949, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_448.json
2018-07-21T05:32:59,679 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_448.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:59,698 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,698 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743774_2950, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bcf8ec5cbbb8441c924eae9e26cb7175/449_descriptor.json
2018-07-21T05:32:59,702 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bcf8ec5cbbb8441c924eae9e26cb7175/449_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:59,703 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,704 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743775_2951, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/449_index.zip
2018-07-21T05:32:59,707 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bcf8ec5cbbb8441c924eae9e26cb7175/449_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,710 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,710 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743776_2952, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_449.json
2018-07-21T05:32:59,714 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_449.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,731 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,731 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743777_2953, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/151b5da93f1f422d9af7f6af4f50aa8a/450_descriptor.json
2018-07-21T05:32:59,735 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/151b5da93f1f422d9af7f6af4f50aa8a/450_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,737 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,738 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,738 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743778_2954, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/450_index.zip
2018-07-21T05:32:59,745 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/151b5da93f1f422d9af7f6af4f50aa8a/450_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,748 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,748 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743779_2955, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_450.json
2018-07-21T05:32:59,753 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_450.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,769 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743780_2956, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1681bcf93f1244f0adf99131ad50fd15/451_descriptor.json
2018-07-21T05:32:59,773 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1681bcf93f1244f0adf99131ad50fd15/451_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,775 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743781_2957, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/451_index.zip
2018-07-21T05:32:59,779 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1681bcf93f1244f0adf99131ad50fd15/451_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,782 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,783 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743782_2958, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_451.json
2018-07-21T05:32:59,787 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_451.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,804 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,805 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743783_2959, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7133f46bdddf4e779350abe919a42a45/452_descriptor.json
2018-07-21T05:32:59,809 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7133f46bdddf4e779350abe919a42a45/452_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,811 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,811 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743784_2960, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/452_index.zip
2018-07-21T05:32:59,815 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7133f46bdddf4e779350abe919a42a45/452_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,819 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,819 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743785_2961, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_452.json
2018-07-21T05:32:59,823 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_452.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,847 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,847 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743786_2962, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1985a82d42c64de595a16136c55b8391/453_descriptor.json
2018-07-21T05:32:59,852 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1985a82d42c64de595a16136c55b8391/453_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,853 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,853 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,854 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,854 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743787_2963, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/453_index.zip
2018-07-21T05:32:59,860 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1985a82d42c64de595a16136c55b8391/453_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:32:59,863 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,863 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743788_2964, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_453.json
2018-07-21T05:32:59,869 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_453.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,895 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743789_2965, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12682022153a46b685b83cc5e5ffeb53/454_descriptor.json
2018-07-21T05:32:59,900 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12682022153a46b685b83cc5e5ffeb53/454_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,902 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,902 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,903 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:32:59,903 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:32:59,903 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,903 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,903 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,903 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,903 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743790_2966, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/454_index.zip
2018-07-21T05:32:59,911 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/12682022153a46b685b83cc5e5ffeb53/454_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:32:59,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,919 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743791_2967, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_454.json
2018-07-21T05:32:59,923 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_454.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,941 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:32:59,942 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:32:59,942 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743792_2968, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/075ad65c239a44feb5b96fefd8c59b7e/455_descriptor.json
2018-07-21T05:32:59,951 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/075ad65c239a44feb5b96fefd8c59b7e/455_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:32:59,953 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:32:59,953 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743793_2969, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/455_index.zip
2018-07-21T05:32:59,957 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/075ad65c239a44feb5b96fefd8c59b7e/455_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:32:59,960 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:59,960 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743794_2970, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_455.json 2018-07-21T05:32:59,964 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_455.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:59,980 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:32:59,981 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:59,981 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743795_2971, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50e8faa828064c0bab30c63417f5dae5/456_descriptor.json 2018-07-21T05:32:59,985 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50e8faa828064c0bab30c63417f5dae5/456_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:59,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:32:59,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:59,987 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743796_2972, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/456_index.zip 2018-07-21T05:32:59,994 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/50e8faa828064c0bab30c63417f5dae5/456_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:32:59,997 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:32:59,997 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743797_2973, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_456.json 2018-07-21T05:33:00,001 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_456.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
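Each "BLOCK* allocate" / "DIR* completeFile" pair in this stretch is one client-side create-write-close cycle: writing the first bytes makes the DFS client ask the NameNode for a block (logged as the allocate), and closing the stream completes the file. A minimal sketch against the stock Hadoop FileSystem API; the path and payload are illustrative, not taken from this test:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // One create/write/close cycle against HDFS. With default config this
    // talks to the fs.defaultFS NameNode.
    public class WriteOneFile {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration(); // core-site/hdfs-site
            FileSystem fs = FileSystem.get(conf);
            Path p = new Path("/tmp/example_descriptor.json"); // illustrative
            try (FSDataOutputStream out = fs.create(p, true)) {
                out.writeBytes("{}"); // first data -> "BLOCK* allocate"
            }                          // close() -> "DIR* completeFile"
        }
    }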
2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,019 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,019 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743798_2974, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8836d812caab427ebf78d620e317fb27/457_descriptor.json 2018-07-21T05:33:00,023 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8836d812caab427ebf78d620e317fb27/457_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,025 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,025 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743799_2975, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/457_index.zip 2018-07-21T05:33:00,029 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8836d812caab427ebf78d620e317fb27/457_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,031 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:00,032 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:00,032 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,032 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743800_2976, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_457.json 2018-07-21T05:33:00,035 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_457.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:00,052 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,052 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743801_2977, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/633730d791fd409bbbe987a94e29f8c9/458_descriptor.json 2018-07-21T05:33:00,056 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/633730d791fd409bbbe987a94e29f8c9/458_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,058 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,058 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743802_2978, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/458_index.zip 2018-07-21T05:33:00,062 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/633730d791fd409bbbe987a94e29f8c9/458_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,065 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,065 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743803_2979, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_458.json 2018-07-21T05:33:00,068 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_458.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
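The files written per segment follow a fixed layout under the job's staging directory: a per-push UUID directory in intermediateSegmentDir holding <n>_descriptor.json and <n>_index.zip (the index.zip is also addressed through an interval/version subtree with colons replaced by underscores), plus a flat <datasource>_<interval>_<version>_<n>.json under segmentsDescriptorDir. A sketch that just reconstructs those names for one sequence number; the constants are copied from the trace, but the helper class itself is illustrative, not Hive/Druid code:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Illustrative reconstruction of the staging paths seen in the trace.
    final class DruidStagingPaths {
        static final String DATASOURCE = "default.druid_max_size_partition";
        static final String INTERVAL =
                "1970-01-01T000000.000Z_1970-01-01T010000.000Z";
        static final String VERSION = "2018-07-21T053159.547-0700";

        // intermediateSegmentDir/<datasource>/<pushUuid>/<n>_descriptor.json
        static Path descriptor(Path staging, String pushUuid, int n) {
            return staging.resolve("intermediateSegmentDir")
                    .resolve(DATASOURCE).resolve(pushUuid)
                    .resolve(n + "_descriptor.json");
        }

        // segmentsDescriptorDir/<datasource>_<interval>_<version>_<n>.json
        static Path segmentDescriptor(Path staging, int n) {
            return staging.resolve("segmentsDescriptorDir")
                    .resolve(DATASOURCE + "_" + INTERVAL + "_" + VERSION
                            + "_" + n + ".json");
        }

        public static void main(String[] args) {
            Path staging = Paths.get(".staging-hiveptest_20180721053159_"
                    + "6582869f-0da3-4041-9795-5a01e96e7cb3");
            System.out.println(descriptor(staging,
                    "633730d791fd409bbbe987a94e29f8c9", 458));
            System.out.println(segmentDescriptor(staging, 458));
        }
    }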
2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,086 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,086 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743804_2980, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/77848693ce844d47aba0ffa9efd909f8/459_descriptor.json 2018-07-21T05:33:00,090 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/77848693ce844d47aba0ffa9efd909f8/459_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:00,092 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,092 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743805_2981, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/459_index.zip 2018-07-21T05:33:00,096 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/77848693ce844d47aba0ffa9efd909f8/459_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,099 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743806_2982, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_459.json 2018-07-21T05:33:00,103 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_459.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,121 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,121 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743807_2983, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1bce598b688d428691348f4e83857389/460_descriptor.json 2018-07-21T05:33:00,128 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1bce598b688d428691348f4e83857389/460_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:00,133 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,133 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743808_2984, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/460_index.zip 2018-07-21T05:33:00,137 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/1bce598b688d428691348f4e83857389/460_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:00,144 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,144 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743809_2985, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_460.json 2018-07-21T05:33:00,152 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_460.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,179 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743810_2986, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8412f1c7abb4edda9897dbcb5327150/461_descriptor.json 2018-07-21T05:33:00,183 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8412f1c7abb4edda9897dbcb5327150/461_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:00,185 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,185 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743811_2987, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/461_index.zip 2018-07-21T05:33:00,189 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/d8412f1c7abb4edda9897dbcb5327150/461_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
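Every completeFile in this run is closed by the same client, DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30, so a single reduce task is pushing all of the segment files, which also explains the strictly sequential block ids and generation stamps. A small parse of the attempt id embedded in that client name, assuming the usual attempt_<clusterTs>_<jobSeq>_<m|r>_<task>_<attempt> layout; the helper is illustrative:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Illustrative parse of the MapReduce attempt id embedded in the
    // DFSClient name logged above.
    final class AttemptIdParse {
        private static final Pattern P = Pattern.compile(
                "attempt_(\\d+)_(\\d+)_([mr])_(\\d{6})_(\\d+)");

        public static void main(String[] args) {
            String client =
                "DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30";
            Matcher m = P.matcher(client);
            if (m.find()) {
                System.out.printf("job %s_%s, %s task %s, attempt %s%n",
                        m.group(1), m.group(2),
                        m.group(3).equals("r") ? "reduce" : "map",
                        m.group(4), m.group(5));
            }
        }
    }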
2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,193 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,193 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743812_2988, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_461.json 2018-07-21T05:33:00,197 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_461.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,222 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,222 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743813_2989, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c204cc32773847609fd7293e67e8790a/462_descriptor.json 2018-07-21T05:33:00,227 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c204cc32773847609fd7293e67e8790a/462_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:00,229 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,229 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743814_2990, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/462_index.zip 2018-07-21T05:33:00,234 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c204cc32773847609fd7293e67e8790a/462_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:00,237 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,237 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743815_2991, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_462.json 2018-07-21T05:33:00,597 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1 2018-07-21T05:33:00,649 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_462.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:00,668 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,668 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743816_2992, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/053337f6367a40418824b6d1b12468f5/463_descriptor.json 2018-07-21T05:33:00,675 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/053337f6367a40418824b6d1b12468f5/463_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
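Amid the HDFS noise, the one Hive-side entry above, "Map 1: 1/1 Reducer 2: 0(+1)/1", is the Tez progress report: per vertex, completed(+running)/total tasks, i.e. the single mapper has finished and the lone reducer doing the segment pushes is still running. A small parse of that shape, assuming the format holds generally; the helper is hypothetical, not Hive's monitor code:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Hypothetical parser for the vertex progress format logged above,
    // e.g. "Map 1: 1/1 Reducer 2: 0(+1)/1".
    final class VertexProgress {
        // <vertex name>: <completed>(+<running>)?/<total>
        private static final Pattern P = Pattern.compile(
                "(\\w+ \\d+): (\\d+)(?:\\(\\+(\\d+)\\))?/(\\d+)");

        public static void main(String[] args) {
            Matcher m = P.matcher("Map 1: 1/1 Reducer 2: 0(+1)/1");
            while (m.find()) {
                int running = m.group(3) == null
                        ? 0 : Integer.parseInt(m.group(3));
                System.out.printf("%s: %s of %s done, %d running%n",
                        m.group(1), m.group(2), m.group(4), running);
            }
        }
    }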
2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,678 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,678 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743817_2993, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/463_index.zip 2018-07-21T05:33:00,687 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/053337f6367a40418824b6d1b12468f5/463_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,693 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,693 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743818_2994, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_463.json 2018-07-21T05:33:00,699 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_463.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:00,719 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,719 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743819_2995, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e58856a1e84c4273a045abd9c50aac05/464_descriptor.json 2018-07-21T05:33:00,725 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e58856a1e84c4273a045abd9c50aac05/464_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,727 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,728 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743820_2996, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/464_index.zip 2018-07-21T05:33:00,741 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e58856a1e84c4273a045abd9c50aac05/464_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,744 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,745 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,745 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743821_2997, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_464.json 2018-07-21T05:33:00,755 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_464.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:00,774 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,774 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743822_2998, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8ef4add80d1244c2a9cc5ea29f762633/465_descriptor.json 2018-07-21T05:33:00,782 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8ef4add80d1244c2a9cc5ea29f762633/465_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:00,785 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,786 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743823_2999, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/465_index.zip 2018-07-21T05:33:00,790 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8ef4add80d1244c2a9cc5ea29f762633/465_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:00,793 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,793 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743824_3000, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_465.json 2018-07-21T05:33:00,797 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_465.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,815 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,815 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743825_3001, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e6d83c8d79594f6ea5231b5f151f7ebd/466_descriptor.json 2018-07-21T05:33:00,823 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e6d83c8d79594f6ea5231b5f151f7ebd/466_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:00,825 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,825 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743826_3002, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/466_index.zip 2018-07-21T05:33:00,830 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e6d83c8d79594f6ea5231b5f151f7ebd/466_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:00,837 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,837 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743827_3003, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_466.json 2018-07-21T05:33:00,847 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_466.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,866 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:00,867 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,867 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743828_3004, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0dda3fc5f3234ec182f5ba712c44794e/467_descriptor.json 2018-07-21T05:33:00,871 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0dda3fc5f3234ec182f5ba712c44794e/467_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,874 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,874 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743829_3005, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/467_index.zip 2018-07-21T05:33:00,878 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/0dda3fc5f3234ec182f5ba712c44794e/467_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:00,883 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,883 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743830_3006, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_467.json 2018-07-21T05:33:00,887 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_467.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:00,904 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,904 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743831_3007, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ca4e0d5832c14793aae7617dbb1666d1/468_descriptor.json 2018-07-21T05:33:00,908 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ca4e0d5832c14793aae7617dbb1666d1/468_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:00,910 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,910 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743832_3008, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/468_index.zip 2018-07-21T05:33:00,914 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ca4e0d5832c14793aae7617dbb1666d1/468_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:00,917 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,917 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743833_3009, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_468.json 2018-07-21T05:33:00,921 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_468.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:00,946 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,946 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743834_3010, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c94c3c4c4e6544a4ae5d9707162be678/469_descriptor.json 2018-07-21T05:33:00,957 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c94c3c4c4e6544a4ae5d9707162be678/469_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:00,959 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,959 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743835_3011, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/469_index.zip 2018-07-21T05:33:00,963 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c94c3c4c4e6544a4ae5d9707162be678/469_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,967 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,967 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743836_3012, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_469.json 2018-07-21T05:33:00,971 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_469.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:00,988 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:00,988 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743837_3013, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bac15334bff0425a8c2896b2d9e5d7de/470_descriptor.json 2018-07-21T05:33:00,992 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bac15334bff0425a8c2896b2d9e5d7de/470_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:00,994 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:00,994 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743838_3014, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/470_index.zip 2018-07-21T05:33:01,399 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/bac15334bff0425a8c2896b2d9e5d7de/470_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:01,403 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,403 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743839_3015, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_470.json 2018-07-21T05:33:01,407 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_470.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:01,423 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,423 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743840_3016, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/60a32a30d11048c2999e651c833ab13c/471_descriptor.json 2018-07-21T05:33:01,427 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/60a32a30d11048c2999e651c833ab13c/471_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,429 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,429 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743841_3017, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/471_index.zip 2018-07-21T05:33:01,433 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/60a32a30d11048c2999e651c833ab13c/471_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:01,436 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,436 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743842_3018, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_471.json 2018-07-21T05:33:01,440 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_471.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:01,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,461 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743843_3019, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/59d857e3b51b4932b642f4346534e676/472_descriptor.json 2018-07-21T05:33:01,464 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/59d857e3b51b4932b642f4346534e676/472_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:01,466 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,466 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743844_3020, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/472_index.zip 2018-07-21T05:33:01,470 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/59d857e3b51b4932b642f4346534e676/472_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,472 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:01,473 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,473 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743845_3021, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_472.json 2018-07-21T05:33:01,476 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_472.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:01,492 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,492 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743846_3022, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/146f81f3e5e0402bba3b568583046212/473_descriptor.json 2018-07-21T05:33:01,497 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/146f81f3e5e0402bba3b568583046212/473_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,498 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,498 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,498 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:01,499 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,499 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743847_3023, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/473_index.zip 2018-07-21T05:33:01,503 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/146f81f3e5e0402bba3b568583046212/473_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,505 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,505 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:01,506 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,506 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743848_3024, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_473.json 2018-07-21T05:33:01,509 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_473.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:01,539 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,539 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743849_3025, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddd450b34a7042f18a1926564d66fb57/474_descriptor.json 2018-07-21T05:33:01,543 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddd450b34a7042f18a1926564d66fb57/474_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
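Each staged file follows the same two-step lifecycle visible above: a "BLOCK* allocate blk_..., replicas=..." record when the DFSClient requests the file's (single) block, then a "DIR* completeFile: ... is closed by DFSClient_..." record a few milliseconds later when the stream is closed. One quick sanity check on a log like this is to tally how the replicas spread across the datanodes; a hypothetical helper, written against the record shapes above and not part of any Hadoop tooling:

```java
import java.io.*;
import java.nio.file.*;
import java.util.*;
import java.util.regex.*;

// Hypothetical helper: tally how often each datanode appears in the
// "BLOCK* allocate blk_..., replicas=..." records of a NameNode log.
// The regex matches the allocate records seen above; Matcher.find() is
// used in a loop so fused lines holding several records still work.
public class AllocateTally {
    private static final Pattern ALLOCATE = Pattern.compile(
        "BLOCK\\* allocate blk_\\d+_\\d+, replicas=([\\d.:, ]+) for ");

    public static void main(String[] args) throws IOException {
        Map<String, Integer> perNode = new HashMap<>();
        try (BufferedReader in = Files.newBufferedReader(Paths.get(args[0]))) {
            String line;
            while ((line = in.readLine()) != null) {
                Matcher m = ALLOCATE.matcher(line);
                while (m.find()) {
                    for (String node : m.group(1).split(",\\s*")) {
                        perNode.merge(node.trim(), 1, Integer::sum);
                    }
                }
            }
        }
        perNode.forEach((node, count) ->
            System.out.println(node + " holds " + count + " replicas"));
    }
}
```

Run as "java AllocateTally namenode.log"; with random placement over four equally loaded datanodes the counts should come out roughly even.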
2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:01,545 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,545 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743850_3026, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/474_index.zip 2018-07-21T05:33:01,548 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/ddd450b34a7042f18a1926564d66fb57/474_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:01,551 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,551 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743851_3027, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_474.json 2018-07-21T05:33:01,555 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_474.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:01,574 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,574 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743852_3028, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41dc09db219e434dad506cb63cd8f408/475_descriptor.json 2018-07-21T05:33:01,578 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41dc09db219e434dad506cb63cd8f408/475_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:01,580 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,580 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743853_3029, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/475_index.zip 2018-07-21T05:33:01,585 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/41dc09db219e434dad506cb63cd8f408/475_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:01,589 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,589 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743854_3030, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_475.json 2018-07-21T05:33:01,593 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_475.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:01,611 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,611 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743855_3031, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/05ed2fc78fcb4277909762b37d107f1a/476_descriptor.json 2018-07-21T05:33:01,621 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/05ed2fc78fcb4277909762b37d107f1a/476_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:01,623 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,623 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743856_3032, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/476_index.zip 2018-07-21T05:33:01,627 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/05ed2fc78fcb4277909762b37d107f1a/476_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:01,631 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,631 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743857_3033, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_476.json 2018-07-21T05:33:01,635 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_476.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:01,660 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,660 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743858_3034, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b014cfc768f2463da202e2c53079a103/477_descriptor.json 2018-07-21T05:33:01,669 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b014cfc768f2463da202e2c53079a103/477_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,671 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,671 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743859_3035, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/477_index.zip 2018-07-21T05:33:01,675 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/b014cfc768f2463da202e2c53079a103/477_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:01,678 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,678 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743860_3036, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_477.json 2018-07-21T05:33:01,682 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_477.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,698 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,698 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743861_3037, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a142c5a7aba14b3184c1cc311c6cc364/478_descriptor.json 2018-07-21T05:33:01,702 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a142c5a7aba14b3184c1cc311c6cc364/478_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
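Runs of rejections like the four consecutive "is excluded, continuing" messages at 05:33:01,698 just above are statistically unremarkable: with four datanodes of which two are excluded, each uniform draw collides with probability 1/2, so the number of draws per successful pick is geometric with mean n/(n-k) = 2, and a streak of at least four collisions has probability (1/2)^4 = 1/16 per pick. A small simulation of that expectation, under the same uniform-redraw assumption as the sketch earlier:

```java
import java.util.Random;

// Back-of-the-envelope check for the retry chatter in the trace:
// with n nodes of which k are excluded, uniform redraws need
// n / (n - k) attempts on average before hitting an allowed node.
public class RetryExpectation {
    public static void main(String[] args) {
        int n = 4, k = 2;                 // 4 datanodes, 2 already excluded
        Random rand = new Random(42);
        long draws = 0, trials = 1_000_000;
        for (long t = 0; t < trials; t++) {
            while (true) {
                draws++;
                if (rand.nextInt(n) >= k) break; // indices 0..k-1 = excluded
            }
        }
        System.out.printf("simulated mean draws: %.3f (theory: %.3f)%n",
                          (double) draws / trials, (double) n / (n - k));
    }
}
```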
2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:01,704 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,704 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743862_3038, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/478_index.zip 2018-07-21T05:33:01,708 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/a142c5a7aba14b3184c1cc311c6cc364/478_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:01,710 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,710 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743863_3039, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_478.json 2018-07-21T05:33:01,714 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_478.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:01,730 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,730 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743864_3040, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8bf8dd33f10d42b4a2a81f33727ad27d/479_descriptor.json 2018-07-21T05:33:01,734 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8bf8dd33f10d42b4a2a81f33727ad27d/479_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:01,736 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,736 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743865_3041, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/479_index.zip 2018-07-21T05:33:01,740 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/8bf8dd33f10d42b4a2a81f33727ad27d/479_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,743 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,743 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743866_3042, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_479.json 2018-07-21T05:33:01,746 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_479.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,763 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:01,764 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:01,764 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743867_3043, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a3b578a63724c66891be95117b8034d/480_descriptor.json 2018-07-21T05:33:01,767 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a3b578a63724c66891be95117b8034d/480_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:01,769 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,769 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743868_3044, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/480_index.zip 2018-07-21T05:33:01,772 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/4a3b578a63724c66891be95117b8034d/480_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:01,775 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:01,776 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:01,776 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,776 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:01,776 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:01,776 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,776 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743869_3045, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_480.json
2018-07-21T05:33:01,780 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_480.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:01,797 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,797 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743870_3046, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/756a1f6a99c94ef3b4ce3e4119c86cc1/481_descriptor.json
2018-07-21T05:33:01,801 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/756a1f6a99c94ef3b4ce3e4119c86cc1/481_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,803 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,803 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743871_3047, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/481_index.zip
2018-07-21T05:33:01,807 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/756a1f6a99c94ef3b4ce3e4119c86cc1/481_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:01,810 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,810 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743872_3048, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_481.json
2018-07-21T05:33:01,814 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_481.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,832 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,832 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,832 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:01,833 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:01,833 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743873_3049, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7b764ef949604ee487c6eb3649d73507/482_descriptor.json
2018-07-21T05:33:01,837 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7b764ef949604ee487c6eb3649d73507/482_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,838 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,838 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:01,839 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,839 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743874_3050, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/482_index.zip
2018-07-21T05:33:01,843 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/7b764ef949604ee487c6eb3649d73507/482_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:01,847 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,848 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743875_3051, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_482.json
2018-07-21T05:33:01,852 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_482.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:01,869 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,869 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743876_3052, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9c3e260eb0914814aad3d90ce3e25c2a/483_descriptor.json
2018-07-21T05:33:01,873 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9c3e260eb0914814aad3d90ce3e25c2a/483_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:01,875 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,875 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743877_3053, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/483_index.zip
2018-07-21T05:33:01,879 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/9c3e260eb0914814aad3d90ce3e25c2a/483_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:01,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,882 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743878_3054, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_483.json
2018-07-21T05:33:01,886 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_483.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:01,903 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,903 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743879_3055, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/46c5c1e166d2443f81cff547b53d4eb8/484_descriptor.json
2018-07-21T05:33:01,907 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/46c5c1e166d2443f81cff547b53d4eb8/484_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:01,909 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,909 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743880_3056, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/484_index.zip
2018-07-21T05:33:01,913 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/46c5c1e166d2443f81cff547b53d4eb8/484_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:01,916 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,916 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743881_3057, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_484.json
2018-07-21T05:33:01,920 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_484.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:01,942 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,942 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743882_3058, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c07aa75cc51846c3a6435a510a00a800/485_descriptor.json
2018-07-21T05:33:01,946 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c07aa75cc51846c3a6435a510a00a800/485_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:01,949 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,949 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743883_3059, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/485_index.zip
2018-07-21T05:33:01,953 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/c07aa75cc51846c3a6435a510a00a800/485_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:01,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,956 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743884_3060, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_485.json
2018-07-21T05:33:01,960 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_485.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:01,978 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:01,978 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743885_3061, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/08704ded232247e4a443876fda5f7816/486_descriptor.json
2018-07-21T05:33:01,982 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/08704ded232247e4a443876fda5f7816/486_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:01,984 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:01,984 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743886_3062, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/486_index.zip
2018-07-21T05:33:02,389 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/08704ded232247e4a443876fda5f7816/486_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:02,393 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:02,393 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743887_3063, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_486.json
2018-07-21T05:33:02,397 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_486.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:02,414 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,414 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743888_3064, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e51733bf15b54b5eb91653a1b7056702/487_descriptor.json
2018-07-21T05:33:02,418 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e51733bf15b54b5eb91653a1b7056702/487_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,420 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:02,420 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743889_3065, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/487_index.zip
2018-07-21T05:33:02,424 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/e51733bf15b54b5eb91653a1b7056702/487_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:02,427 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,427 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743890_3066, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_487.json
2018-07-21T05:33:02,431 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_487.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:02,448 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,448 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743891_3067, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5da947a6d69545dabfbd5d3efe5427b5/488_descriptor.json
2018-07-21T05:33:02,452 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5da947a6d69545dabfbd5d3efe5427b5/488_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:02,453 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,454 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743892_3068, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/488_index.zip
2018-07-21T05:33:02,457 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/5da947a6d69545dabfbd5d3efe5427b5/488_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:02,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,460 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743893_3069, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_488.json
2018-07-21T05:33:02,464 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_488.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:02,482 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,482 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743894_3070, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91adf73d5c9b4f35b637cf18a2272fb5/489_descriptor.json
2018-07-21T05:33:02,486 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91adf73d5c9b4f35b637cf18a2272fb5/489_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:02,488 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:02,488 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743895_3071, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/489_index.zip
2018-07-21T05:33:02,492 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/91adf73d5c9b4f35b637cf18a2272fb5/489_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,495 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743896_3072, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_489.json
2018-07-21T05:33:02,499 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_489.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,517 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,517 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743897_3073, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dd3e7d6e4ffd465293c00669073472a3/490_descriptor.json
2018-07-21T05:33:02,521 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dd3e7d6e4ffd465293c00669073472a3/490_descriptor.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:02,523 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,523 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743898_3074, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/490_index.zip
2018-07-21T05:33:02,527 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/intermediateSegmentDir/default.druid_max_size_partition/dd3e7d6e4ffd465293c00669073472a3/490_index.zip is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:02,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:02,530 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743899_3075, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_490.json
2018-07-21T05:33:02,534 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3/segmentsDescriptorDir/default.druid_max_size_partition_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053159.547-0700_490.json is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,548 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,548 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,548 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:02,549 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:02,549 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:02,549 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:02,549 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:02,549 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
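The staging paths in the allocate/completeFile records above follow one convention per artifact; for the index.zip files it is <intermediateSegmentDir>/<dataSource>/<intervalStart>_<intervalEnd>/<version>/<partitionNum>_index.zip. A hypothetical helper that rebuilds that layout (the method name and base directory are illustrative; the pattern itself is inferred only from the paths in this log):

    public class SegmentPathSketch {
        // Rebuilds the index.zip staging-path pattern seen in the records above.
        static String indexZipPath(String baseDir, String dataSource, String intervalStart,
                                   String intervalEnd, String version, int partitionNum) {
            return String.format("%s/%s/%s_%s/%s/%d_index.zip",
                    baseDir, dataSource, intervalStart, intervalEnd, version, partitionNum);
        }

        public static void main(String[] args) {
            System.out.println(indexZipPath(
                    "/tmp/druidStagingDir/intermediateSegmentDir", // hypothetical base directory
                    "default.druid_max_size_partition",
                    "19700101T000000.000Z", "19700101T010000.000Z",
                    "2018-07-21T05_31_59.547-07_00", 490));
        }
    }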
2018-07-21T05:33:02,549 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:02,549 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743900_3076, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003/tmpstats-0_FS_3
2018-07-21T05:33:02,553 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003/tmpstats-0_FS_3 is closed by DFSClient_attempt_15321756062111_0001_r_000000_0_-303125279_30
2018-07-21T05:33:02,573 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/dag_1532175606211_0001_7.recovery is closed by DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:33:02,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:33:02,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1
2018-07-21T05:33:02,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:33:02,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode
2018-07-21T05:33:02,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode
2018-07-21T05:33:02,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@296a0e92, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:33:02,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:33:02,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:33:02,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:33:02,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:33:02,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:33:02,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:33:02,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:33:02,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
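Stage-0:MOVE, started above, relocates the query's staging output into the warehouse directory, as the MoveTask record below shows. A sketch of the underlying filesystem operation with the real Hadoop API (this is not the actual MoveTask code, which also handles permissions, encryption zones, and copy fallbacks; paths are taken from the records around it):

    import java.io.IOException;
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class MoveStagingSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:35925"), new Configuration());
            // Source and destination as logged by FileOperations below.
            Path staging = new Path("/build/ql/test/data/warehouse/"
                    + ".hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10002");
            Path table = new Path("/build/ql/test/data/warehouse/druid_max_size_partition");
            if (!fs.rename(staging, table)) {
                throw new IOException("Move failed: " + staging + " -> " + table);
            }
        }
    }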
2018-07-21T05:33:02,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: MoveTask moving hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10002 to hdfs://localhost:35925/build/ql/test/data/warehouse/druid_max_size_partition
2018-07-21T05:33:02,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:33:02,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Moving data to directory hdfs://localhost:35925/build/ql/test/data/warehouse/druid_max_size_partition from hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10002
2018-07-21T05:33:02,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:33:02,587 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode
2018-07-21T05:33:02,587 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_max_size_partition
2018-07-21T05:33:02,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.druid_max_size_partition on null
2018-07-21T05:33:02,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:33:02,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:33:02,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:33:02,590 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:33:02,590 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:33:02,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:33:02,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:33:02,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:33:02,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:33:02,596 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
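The "Table[druid_segments] already exists" record that follows is the metadata connector's create-unless-present check against the Derby store configured just above. A rough JDBC sketch of that check, assuming the Derby network client driver is on the classpath (the DDL here is a placeholder, not the real druid_segments schema, and the URI is copied from the DruidStorageHandler record above):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class EnsureMetadataTable {
        public static void main(String[] args) throws Exception {
            String uri = "jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db";
            try (Connection conn = DriverManager.getConnection(uri)) {
                // Derby stores unquoted identifiers in upper case.
                try (ResultSet rs = conn.getMetaData().getTables(null, null, "DRUID_SEGMENTS", null)) {
                    if (rs.next()) {
                        System.out.println("Table[druid_segments] already exists");
                        return;
                    }
                }
                try (Statement st = conn.createStatement()) {
                    // Placeholder DDL; the real table carries payload, version, used flags, etc.
                    st.executeUpdate("CREATE TABLE druid_segments (id VARCHAR(255) NOT NULL PRIMARY KEY)");
                }
            }
        }
    }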
2018-07-21T05:33:02,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.SQLMetadataConnector: Table[druid_segments] already exists
2018-07-21T05:33:02,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: pre-create data source with name default.druid_max_size_partition
2018-07-21T05:33:02,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:33:02,607 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: create_table: Table(tableName:druid_max_size_partition, dbName:default, owner:hive_test_user, createTime:1532176382, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.druid.serde.DruidSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{druid.segment.granularity=HOUR, external.table.purge=true, EXTERNAL=TRUE, bucketing_version=2, druid.query.granularity=MINUTE, druid.datasource=default.druid_max_size_partition, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER)
2018-07-21T05:33:02,608 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=create_table: Table(tableName:druid_max_size_partition, dbName:default, owner:hive_test_user, createTime:1532176382, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.druid.serde.DruidSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{druid.segment.granularity=HOUR, external.table.purge=true, EXTERNAL=TRUE, bucketing_version=2, druid.query.granularity=MINUTE, druid.datasource=default.druid_max_size_partition, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE, privileges:PrincipalPrivilegeSet(userPrivileges:{hive_test_user=[PrivilegeGrantInfo(privilege:INSERT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:SELECT, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:UPDATE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true), PrivilegeGrantInfo(privilege:DELETE, createTime:-1, grantor:hive_test_user, grantorType:USER, grantOption:true)]}, groupPrivileges:null, rolePrivileges:null), temporary:false, catName:hive, ownerType:USER)
2018-07-21T05:33:02,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:33:02,612 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.MetaStoreUtils: Updating table stats for druid_max_size_partition
2018-07-21T05:33:02,612 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] utils.MetaStoreUtils: Updated size of table druid_max_size_partition to 0
2018-07-21T05:33:02,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:33:02,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:33:02,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: commit insert into table druid_max_size_partition overwrite false
2018-07-21T05:33:03,511 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Moving [634] Druid segments from staging directory [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3] to Deep storage [/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage]
2018-07-21T05:33:03,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:33:03,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:33:03,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:33:03,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:33:03,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:33:03,512 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hdfs.HdfsDataSegmentPusher: Configured HDFS as deep storage
2018-07-21T05:33:03,514 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Building timeline for umbrella Interval [1969-12-31T23:00:00.000Z/1970-01-01T01:00:00.000Z]
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,527 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,527 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743901_3077, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,537 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
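The fs.FileSystem DEBUG records above trace Hadoop's scheme-to-implementation lookup: an explicit fs.hdfs.impl setting would win, and since none is set the implementation comes from the ServiceLoader-registered "service filesystems", resolving to DistributedFileSystem. The same lookup can be reproduced directly (namenode URI from this log):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FsResolutionDemo {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // No fs.hdfs.impl override here, so the ServiceLoader-discovered
            // org.apache.hadoop.hdfs.DistributedFileSystem is selected.
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:35925/"), conf);
            System.out.println("FS for hdfs is class " + fs.getClass().getName());
        }
    }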
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:03,546 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,546 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743902_3078, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,558 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
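For the "Building timeline for umbrella Interval [1969-12-31T23:00:00.000Z/1970-01-01T01:00:00.000Z]" record above: the umbrella interval is simply the earliest segment start through the latest segment end. A sketch of that computation with java.time, assuming the two HOUR-granularity buckets implied by the segment paths in this log:

    import java.time.Instant;
    import java.util.Arrays;
    import java.util.List;

    public class UmbrellaIntervalSketch {
        public static void main(String[] args) {
            // Hour-granularity segment intervals inferred from the log's segment paths.
            List<Instant[]> segments = Arrays.asList(
                    new Instant[] {Instant.parse("1969-12-31T23:00:00Z"), Instant.parse("1970-01-01T00:00:00Z")},
                    new Instant[] {Instant.parse("1970-01-01T00:00:00Z"), Instant.parse("1970-01-01T01:00:00Z")});
            Instant start = segments.stream().map(s -> s[0]).min(Instant::compareTo).get();
            Instant end = segments.stream().map(s -> s[1]).max(Instant::compareTo).get();
            System.out.println("Umbrella Interval [" + start + "/" + end + "]");
        }
    }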
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,566 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,566 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743903_3079, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,578 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:03,586 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,586 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743904_3080, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,595 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:03,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,603 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743905_3081, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,611 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,618 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,619 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,619 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,619 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,619 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,619 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:03,619 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,619 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743906_3082, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,627 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,636 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,636 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743907_3083, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,645 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,652 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,653 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,653 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743908_3084, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,661 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:03,670 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,670 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743909_3085, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,678 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:03,684 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,684 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743910_3086, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,692 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:03,699 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,699 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743911_3087, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,708 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:03,715 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,715 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743912_3088, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,723 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:03,729 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,729 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743913_3089, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,744 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,751 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,752 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:03,752 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,752 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,752 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,752 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743914_3090, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,760 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:03,767 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,767 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743915_3091, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,775 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,780 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,780 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:03,781 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,781 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743916_3092, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,788 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,794 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,794 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743917_3093, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,801 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:03,807 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,807 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743918_3094, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,815 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,821 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,821 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743919_3095, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,828 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:03,838 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,838 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743920_3096, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,854 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:03,862 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,862 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743921_3097, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,870 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:03,879 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,879 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743922_3098, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,888 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:03,895 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,896 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743923_3099, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,903 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,910 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,910 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743924_3100, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,917 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:03,924 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,924 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743925_3101, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,931 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,937 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,938 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,938 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743926_3102, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,945 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,952 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:03,952 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743927_3103, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,961 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
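The entries above repeat a fixed cycle for each rewrite of the Druid segment descriptor: the DFSClient opens descriptor.json, the NameNode allocates one block (BLOCK* allocate) with three replicas, and DIR* completeFile is logged once the client closes the stream. The sketch below is not the test's own code; it only illustrates the kind of client-side write that produces one allocate/completeFile pair. The Configuration, path, and JSON payload are illustrative stand-ins for the mini-cluster setup.

    // Minimal sketch (assumptions noted above): HDFS allocates a block once the
    // client starts streaming data and the NameNode logs completeFile when the
    // output stream is closed. Assumes fs.defaultFS points at the test NameNode.
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DescriptorWriter {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();           // picks up fs.defaultFS
            FileSystem fs = FileSystem.get(conf);
            Path descriptor = new Path("/tmp/druid-data/deep-storage/descriptor.json");
            // create(path, overwrite=true) starts a fresh file on every call, so
            // each rewrite shows up in the NameNode log as a new block id.
            try (FSDataOutputStream out = fs.create(descriptor, true)) {
                out.write("{\"segment\":\"placeholder\"}".getBytes(StandardCharsets.UTF_8));
            } // close() -> "DIR* completeFile: ... is closed by DFSClient_..."
        }
    }

Because the file is overwritten each time, the log shows a fresh block id (blk_1073743912_3088 onward) for what is nominally the same descriptor.json path.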
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:03,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:03,969 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743928_3104, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,977 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:03,986 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:03,987 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:03,987 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743929_3105, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:03,995 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,001 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,002 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743930_3106, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,010 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,015 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,016 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,016 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743931_3107, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,024 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,030 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,030 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743932_3108, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,041 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,047 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743933_3109, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,055 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,061 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,061 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743934_3110, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,069 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,075 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,075 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743935_3111, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,083 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,089 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743936_3112, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,112 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,119 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,119 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743937_3113, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,135 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,144 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,144 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,144 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,145 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,145 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,145 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,145 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,145 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,145 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,145 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743938_3114, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,162 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:04,171 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,171 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743939_3115, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,186 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,191 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,191 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743940_3116, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,205 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,215 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:04,216 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,216 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743941_3117, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,221 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,232 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,232 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,232 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:04,233 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,233 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743942_3118, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,640 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,645 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,645 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,646 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,646 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,646 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,646 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,646 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:04,646 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,646 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743943_3119, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,650 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,655 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,655 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743944_3120, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,659 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,663 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,663 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743945_3121, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,667 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,671 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,672 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743946_3122, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,675 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,679 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,679 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743947_3123, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,683 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
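Each "BLOCK* allocate" / "DIR* completeFile" pair in this stretch is one small descriptor.json written by the test's DFS client: writing the first bytes makes the client request a block from the NameNode (allocated with three replicas, the default replication factor), and closing the stream completes the file. A hedged client-side sketch that would produce one such pair, assuming a NameNode reachable at the log's RPC port 35925 and an illustrative target path:

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch of the client-side write behind one "BLOCK* allocate" / "DIR* completeFile" pair.
// The fs.defaultFS address and the target path are placeholders for this test cluster.
public class DescriptorWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://127.0.0.1:35925"); // assumed NameNode RPC port from the log

        FileSystem fs = FileSystem.get(conf);
        Path descriptor = new Path("/tmp/druid-data/deep-storage/segment/descriptor.json");

        // Writing the first bytes makes the client ask the NameNode for a block:
        // "BLOCK* allocate blk_..., replicas=<3 datanodes> for <path>".
        try (FSDataOutputStream out = fs.create(descriptor, true /* overwrite */)) {
            out.write("{\"dataSource\":\"default.druid_max_size_partition\"}"
                    .getBytes(StandardCharsets.UTF_8));
        }
        // Closing the stream completes the file on the NameNode:
        // "DIR* completeFile: <path> is closed by DFSClient_...".
    }
}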
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,687 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,687 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743948_3124, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,690 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,694 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,694 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743949_3125, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,698 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,702 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,702 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743950_3126, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,711 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,715 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,716 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743951_3127, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,720 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,724 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,724 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743952_3128, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,728 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,732 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,732 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743953_3129, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,735 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:04,741 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,741 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743954_3130, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,745 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,749 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,749 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743955_3131, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,753 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,758 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,758 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743956_3132, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,762 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,766 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,766 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743957_3133, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,769 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,774 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,774 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743958_3134, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,779 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,783 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,783 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743959_3135, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,787 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:04,792 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,792 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743960_3136, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,796 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:04,801 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,801 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743961_3137, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,805 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,809 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,810 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,810 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,810 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743962_3138, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,814 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:04,818 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,818 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743963_3139, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,821 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:04,828 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,828 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743964_3140, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,832 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:04,836 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,836 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743965_3141, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,839 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:04,843 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,843 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743966_3142, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,847 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:04,851 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:04,851 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743967_3143, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,855 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:04,863 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:04,863 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743968_3144, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:04,867 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:04,871 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,871 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743969_3145, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,874 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:04,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,878 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743970_3146, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,882 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:04,886 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,887 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743971_3147, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,890 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:04,894 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,894 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743972_3148, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,897 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:04,901 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,901 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743973_3149, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,905 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:04,909 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,909 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743974_3150, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,912 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:04,916 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,916 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743975_3151, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,920 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:04,924 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:04,924 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743976_3152, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,927 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:04,931 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:04,931 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743977_3153, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,934 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:04,938 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,939 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743978_3154, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,942 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:04,946 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,946 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743979_3155, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,949 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:04,953 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,953 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743980_3156, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,957 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:04,961 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,961 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743981_3157, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,965 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:04,969 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,969 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743982_3158, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,973 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:04,977 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,977 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743983_3159, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,981 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:04,985 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,985 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743984_3160, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,988 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:04,993 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:04,993 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743985_3161, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:04,997 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:05,001 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,001 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743986_3162, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,005 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,009 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,009 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743987_3163, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,013 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:05,017 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,017 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743988_3164, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,021 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:05,025 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,025 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743989_3165, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,029 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,033 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,033 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743990_3166, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,037 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,041 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,041 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743991_3167, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,045 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,049 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,049 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743992_3168, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,053 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,057 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,057 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743993_3169, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,061 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,065 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,065 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743994_3170, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,069 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,073 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,073 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743995_3171, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,077 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:05,081 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,081 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743996_3172, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,085 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,089 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,089 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,089 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,090 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,090 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743997_3173, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,094 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,098 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,098 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743998_3174, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,102 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:05,106 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,106 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073743999_3175, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,110 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:05,114 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,114 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744000_3176, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,119 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,123 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,123 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744001_3177, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,128 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,132 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,132 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744002_3178, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,136 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
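Annotation: each "BLOCK* allocate blk_..., replicas=..." / "DIR* completeFile" pair above is one complete write of the same descriptor.json path — the segment push apparently re-creates the file once per attempt, so the consecutive block IDs (blk_1073743992_3168 onward) all belong to that single path, each allocated with three replicas and closed immediately by DFSClient_NONMAPREDUCE_680435605_1. A hypothetical client-side sketch of the HDFS calls that generate exactly this NameNode traffic; the path and payload here are illustrative assumptions, not values from the test:

    // Hypothetical sketch of the write pattern behind the allocate/completeFile
    // pairs: create a small replicated file and close it right away.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import java.nio.charset.StandardCharsets;

    public class DescriptorWriter {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration(); // picks up fs.defaultFS, dfs.replication, ...
            FileSystem fs = FileSystem.get(conf);
            // Illustrative path; the log writes under the ptest druid-data deep-storage dir.
            Path descriptor = new Path("/tmp/druid-data/deep-storage/segment/descriptor.json");
            // create() registers the file with the NameNode; the first write that
            // needs a block triggers "BLOCK* allocate", and close() (via
            // try-with-resources) triggers "DIR* completeFile ... is closed by ...".
            try (FSDataOutputStream out = fs.create(descriptor, true /* overwrite */)) {
                out.write("{\"segment\":\"...\"}".getBytes(StandardCharsets.UTF_8));
            }
        }
    }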
2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,141 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,142 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,142 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,142 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744003_3179, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,145 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,150 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,150 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744004_3180, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,154 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,159 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,159 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744005_3181, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,163 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,168 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,168 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744006_3182, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,172 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:05,177 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,178 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744007_3183, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,182 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:05,186 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,186 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744008_3184, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,190 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,194 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,194 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744009_3185, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,198 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,204 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,204 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744010_3186, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,208 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,212 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,212 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744011_3187, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,215 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,219 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,219 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744012_3188, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,223 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,226 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:05,227 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,227 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744013_3189, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,230 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,234 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,234 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744014_3190, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,238 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,242 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,242 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,243 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,243 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744015_3191, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,246 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,250 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,250 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744016_3192, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,254 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,258 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,259 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,259 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744017_3193, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,262 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
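Annotation: a note on the retry runs, worked from the entries at 05:33:05,259 above, where five consecutive "is excluded, continuing." messages precede the successful pick of 127.0.0.1:40780. Assuming independent uniform draws over the 4 registered datanodes with 2 of them excluded, each draw misses with probability 2/4 = 1/2, so a run of at least five misses has probability (1/2)^5 = 1/32 ≈ 3% per selection, and the expected number of draws per successful pick is only 1/(1/2) = 2. Across the thousands of selections in this log, an occasional run of that length is exactly what uniform sampling predicts, so these bursts do not indicate a placement problem.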
2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,266 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,266 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744018_3194, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,270 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,274 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,274 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744019_3195, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,278 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,282 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,282 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744020_3196, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,286 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,290 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,290 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744021_3197, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,294 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,298 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,299 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744022_3198, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,302 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,306 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,306 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744023_3199, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,309 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,313 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,313 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744024_3200, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,318 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,322 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,322 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744025_3201, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,325 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,329 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,329 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744026_3202, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,332 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,336 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,336 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744027_3203, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,339 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,343 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,343 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744028_3204, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,346 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,350 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,350 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744029_3205, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,354 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,357 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,357 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744030_3206, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,361 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,364 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,364 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744031_3207, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,368 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,372 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,372 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744032_3208, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,379 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,384 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,384 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744033_3209, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,390 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,395 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,395 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744034_3210, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,398 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,402 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,402 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744035_3211, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,407 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,411 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,411 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744036_3212, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,416 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:05,421 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,421 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744037_3213, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,425 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,429 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,429 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744038_3214, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,432 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:05,437 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,437 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744039_3215, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,441 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,445 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,445 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744040_3216, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,449 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:05,453 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,453 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744041_3217, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,456 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:05,460 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,460 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744042_3218, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,464 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,468 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,468 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744043_3219, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,471 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:05,475 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:05,475 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744044_3220, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,478 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:05,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:05,482 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744045_3221, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,485 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:05,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:05,489 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744046_3222, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:05,492 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:05,496 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:05,496 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:05,496 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:05,496 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:05,496 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,496 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,497 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:05,497 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,497 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744047_3223, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,500 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:05,504 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,504 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744048_3224, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,508 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:05,511 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,511 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744049_3225, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,515 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:05,518 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:05,518 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744050_3226, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,522 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,525 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,525 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,526 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:05,526 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,526 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,526 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,526 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:05,526 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:05,526 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744051_3227, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,529 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:05,533 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:05,533 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744052_3228, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,536 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,540 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:05,541 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,541 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744053_3229, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,945 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
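The DEBUG trace above repeats one pattern per allocated block: the default placement policy first looks for a target outside the writer's rack (the "Failed to find datanode (scope=\"\" excludedScope=\"/default-rack\")" / "No node to choose." pair), which always fails here because all four datanodes (127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570) sit on /default-rack, and it then falls back to a random in-rack pick that skips nodes already holding a replica ("Node ... is excluded, continuing."). The following is a minimal illustrative sketch of that fallback loop only; it is not the actual org.apache.hadoop.net.NetworkTopology implementation, and the class and method names are hypothetical.

import java.util.List;
import java.util.Random;
import java.util.Set;

public final class ChooseRandomSketch {

    private static final Random RANDOM = new Random();

    // Returns a non-excluded node from the rack, or null when every node is
    // excluded (the "No node to choose." case in the log).
    static String chooseRandom(List<String> rackNodes, Set<String> excludeNodes) {
        long available = rackNodes.stream()
                .filter(n -> !excludeNodes.contains(n))
                .count();
        if (available == 0) {
            return null; // nothing left to pick
        }
        while (true) {
            String candidate = rackNodes.get(RANDOM.nextInt(rackNodes.size()));
            if (excludeNodes.contains(candidate)) {
                // corresponds to "Node <addr> is excluded, continuing."
                continue;
            }
            return candidate; // corresponds to "chooseRandom returning <addr>"
        }
    }

    public static void main(String[] args) {
        List<String> rack = List.of("127.0.0.1:33099", "127.0.0.1:40780",
                "127.0.0.1:45625", "127.0.0.1:52570");
        // Picking the second replica target: the node holding the first replica
        // is excluded, mirroring excludeNodes=[127.0.0.1:45625] in the log.
        System.out.println(chooseRandom(rack, Set.of("127.0.0.1:45625")));
    }
}

Because a random pick can land on an excluded node more than once before succeeding, the same "is excluded, continuing." line may legitimately repeat several times in a row, as it does at several points in this trace.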
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:05,951 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:05,951 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744054_3230, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,955 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:05,959 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:05,959 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744055_3231, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:05,963 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:05,967 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:05,967 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744056_3232, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,371 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,379 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,379 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,379 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:06,379 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,380 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,380 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,380 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:06,380 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,380 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744057_3233, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,428 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,432 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,432 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744058_3234, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,438 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,442 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,443 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744059_3235, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,453 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:06,484 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,484 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744060_3236, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,516 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,525 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,525 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744061_3237, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,530 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,534 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,535 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,535 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,535 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,535 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,535 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:06,535 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,535 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744062_3238, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,542 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:06,549 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,549 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744063_3239, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,553 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,558 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,558 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744064_3240, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,563 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:06,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,567 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744065_3241, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,571 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,575 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,575 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,575 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:06,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,576 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744066_3242, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,579 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,584 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,584 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744067_3243, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,588 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,592 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,592 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744068_3244, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,596 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,600 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,600 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744069_3245, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,605 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:06,609 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,609 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744070_3246, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,613 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:06,617 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,617 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744071_3247, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,621 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,626 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,626 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744072_3248, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,630 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:06,633 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,634 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,634 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744073_3249, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,637 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:06,641 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,641 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744074_3250, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,660 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,663 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,664 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:06,664 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,664 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744075_3251, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,677 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,681 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,681 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744076_3252, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,690 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:06,700 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,700 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744077_3253, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,704 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,708 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,708 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744078_3254, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,711 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:06,715 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,716 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744079_3255, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,719 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:06,723 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,723 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744080_3256, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,727 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:06,731 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,731 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744081_3257, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,734 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:06,738 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,738 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744082_3258, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,742 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:06,746 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,746 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744083_3259, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,749 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:06,753 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,754 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744084_3260, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,767 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,776 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,776 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744085_3261, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,781 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:06,787 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,787 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744086_3262, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,791 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,795 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,796 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:06,796 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,796 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744087_3263, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,800 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,804 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,804 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,804 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,805 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,805 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,805 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,805 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:06,805 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,805 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744088_3264, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,809 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,813 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,813 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744089_3265, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,820 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:06,824 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,824 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744090_3266, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,828 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:06,832 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,832 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744091_3267, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,835 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,839 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,839 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744092_3268, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,846 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:06,850 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,851 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744093_3269, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,854 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:06,858 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,858 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744094_3270, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,862 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:06,865 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,866 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744095_3271, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,869 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:06,874 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,874 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744096_3272, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,878 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:06,882 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,882 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744097_3273, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,886 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:06,890 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,890 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744098_3274, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,894 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,898 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,898 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:06,899 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:06,899 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744099_3275, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,902 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:06,906 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:06,906 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744100_3276, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,910 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:06,914 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:06,914 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744101_3277, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:06,918 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:06,922 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,922 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,922 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:06,922 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:06,923 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,923 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744102_3278, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,926 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,930 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,931 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744103_3279, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,934 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,938 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,938 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744104_3280, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,942 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:06,946 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:06,946 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744105_3281, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:06,949 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:06,953 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:06,953 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744106_3282, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,361 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:07,366 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,366 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744107_3283, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,370 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:07,374 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,374 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744108_3284, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,382 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:07,391 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,391 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744109_3285, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,395 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,399 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:07,400 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,400 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744110_3286, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,404 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:07,408 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,408 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744111_3287, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,411 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:07,415 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,415 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744112_3288, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,421 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,425 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,425 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744113_3289, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,429 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,434 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,435 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744114_3290, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,442 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:07,447 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,447 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744115_3291, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,451 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,460 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744116_3292, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,467 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:07,475 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,475 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744117_3293, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,480 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,492 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,492 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744118_3294, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,497 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:07,502 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,502 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744119_3295, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,506 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,511 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,511 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744120_3296, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,516 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,520 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,520 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744121_3297, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,525 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,529 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744122_3298, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,534 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:07,538 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,538 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744123_3299, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,543 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,548 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,548 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,549 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:07,549 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:07,549 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,549 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,549 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,549 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,549 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744124_3300, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,559 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,564 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,564 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744125_3301, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,568 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,573 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,573 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744126_3302, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,578 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,585 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,585 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744127_3303, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,593 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,598 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,598 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:07,599 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,599 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744128_3304, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,603 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:07,607 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,607 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744129_3305, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,613 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,617 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,618 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744130_3306, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,621 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,626 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,626 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744131_3307, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,631 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:07,636 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:07,636 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744132_3308, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:07,639 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:07,643 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,643 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,643 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:07,643 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:07,643 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:07,643 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:07,644 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:07,644 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:07,644 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:07,644 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744133_3309, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,049 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,054 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,055 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,055 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744134_3310, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,058 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,062 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,062 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744135_3311, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,065 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,069 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,069 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744136_3312, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,073 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,076 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,077 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744137_3313, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,080 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,084 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744138_3314, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,087 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,091 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,092 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744139_3315, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,095 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,099 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,099 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744140_3316, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,102 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,106 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,106 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744141_3317, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,110 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,113 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,114 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,114 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744142_3318, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,117 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,121 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,121 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744143_3319, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,125 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,129 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,129 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744144_3320, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,133 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,137 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,137 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744145_3321, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,141 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,145 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,145 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744146_3322, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,148 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:08,152 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,153 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744147_3323, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,156 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,159 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,160 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,160 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744148_3324, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,597 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,602 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,602 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744149_3325, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,606 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,611 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,611 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744150_3326, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,615 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,619 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,619 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744151_3327, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,623 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,627 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,627 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744152_3328, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,630 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,634 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,634 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744153_3329, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,638 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,642 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:08,642 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744154_3330, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:08,645 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:08,649 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:08,649 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744155_3331, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:08,653 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 
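The DEBUG entries above trace NetworkTopology.chooseRandom picking replica targets: the topology counts the non-excluded candidates ("Choosing random from N available nodes"), samples a node, skips it with "is excluded, continuing." when it lands on an excluded one, and finally reports "chooseRandom returning X". A minimal sketch of that pick-and-retry pattern, in plain Java rather than Hadoop's actual NetworkTopology code (node strings here are the datanode addresses seen in the log):

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.Set;

// Illustrative only: mirrors the observable behavior of the log lines above,
// not the real org.apache.hadoop.net.NetworkTopology implementation.
final class ChooseRandomSketch {
    private static final Random RANDOM = new Random();

    static String chooseRandom(List<String> candidates, Set<String> excludeNodes) {
        List<String> available = new ArrayList<>(candidates);
        available.removeAll(excludeNodes);
        System.out.println("Choosing random from " + available.size() + " available nodes");
        if (available.isEmpty()) {
            System.out.println("No node to choose.");
            return null;
        }
        while (true) {
            // Sample from the full candidate list and retry on excluded hits,
            // which is why the same node can show up as
            // "is excluded, continuing." several times in a row above.
            String node = candidates.get(RANDOM.nextInt(candidates.size()));
            if (excludeNodes.contains(node)) {
                System.out.println("Node " + node + " is excluded, continuing.");
                continue;
            }
            System.out.println("chooseRandom returning " + node);
            return node;
        }
    }
}

With the four datanodes of this run, chooseRandom(List.of("127.0.0.1:33099", "127.0.0.1:40780", "127.0.0.1:45625", "127.0.0.1:52570"), Set.of("127.0.0.1:52570", "127.0.0.1:40780")) can only return 127.0.0.1:33099 or 127.0.0.1:45625, matching the first selection in this section.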
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,656 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,656 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744156_3332, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,660 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,663 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,663 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744157_3333, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,667 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,670 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,671 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744158_3334, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,674 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,677 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,677 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744159_3335, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,681 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,684 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,685 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,685 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,685 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744160_3336, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,688 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,692 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,692 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744161_3337, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,696 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:08,699 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,699 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744162_3338, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,702 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,706 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,706 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744163_3339, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,709 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,712 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,712 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744164_3340, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,717 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
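Each allocation above opens with the pair "Failed to find datanode (scope="" excludedScope="/default-rack")." / "No node to choose.": the placement policy first asks the topology for a node outside the writer's rack, and because every datanode in this single-rack test cluster sits on /default-rack, that query always comes back empty and the policy falls back to an in-rack choice that excludes the replicas already placed. A rough sketch of that two-phase control flow under the single-rack assumption (the rack map, method names, and class here are hypothetical, not HDFS's actual BlockPlacementPolicyDefault):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

// Hypothetical sketch of the fallback visible in the log: try a remote rack
// first, then fall back to the local rack minus excludeNodes.
final class RackFallbackSketch {
    private static final Random RANDOM = new Random();

    static String chooseReplica(Map<String, String> nodeToRack, String localRack,
                                Set<String> excludeNodes) {
        // Phase 1: any non-excluded node whose rack differs from localRack
        // (the log's scope="" excludedScope="/default-rack" query).
        List<String> remote = new ArrayList<>();
        for (Map.Entry<String, String> e : nodeToRack.entrySet()) {
            if (!e.getValue().equals(localRack) && !excludeNodes.contains(e.getKey())) {
                remote.add(e.getKey());
            }
        }
        if (!remote.isEmpty()) {
            return remote.get(RANDOM.nextInt(remote.size()));
        }
        // Phase 2: with only /default-rack present, phase 1 always fails
        // ("Failed to find datanode ... No node to choose."), so choose
        // within the local rack, still honoring excludeNodes.
        List<String> local = new ArrayList<>();
        for (Map.Entry<String, String> e : nodeToRack.entrySet()) {
            if (e.getValue().equals(localRack) && !excludeNodes.contains(e.getKey())) {
                local.add(e.getKey());
            }
        }
        return local.isEmpty() ? null : local.get(RANDOM.nextInt(local.size()));
    }
}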
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:08,721 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,721 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744165_3341, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,725 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,729 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,729 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744166_3342, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,732 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:08,736 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,736 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744167_3343, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,740 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,743 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,743 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744168_3344, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,747 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,755 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,755 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744169_3345, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,759 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,763 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,763 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744170_3346, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,767 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,772 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,772 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744171_3347, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,775 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,779 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,780 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,780 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744172_3348, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,783 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,787 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,788 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,788 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,788 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,788 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,788 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:08,788 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,788 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744173_3349, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,793 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,797 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,797 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744174_3350, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,801 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,804 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,804 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,804 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,804 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,804 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,804 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,805 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:08,805 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,805 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744175_3351, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,808 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,812 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,812 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744176_3352, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,815 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:08,819 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,819 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744177_3353, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,823 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,827 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,827 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744178_3354, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,830 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,834 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,834 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744179_3355, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,837 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,846 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,846 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744180_3356, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,850 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
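On the client side, each allocate/completeFile pair above corresponds to one small descriptor.json being created, written, and closed over HDFS: create() drives the NameNode's "BLOCK* allocate blk_..." for the first block, and close() drives "DIR* completeFile: ... is closed by DFSClient_...". A minimal standalone sketch using the stock org.apache.hadoop.fs.FileSystem API; this test actually goes through Druid's HDFS deep-storage pusher rather than calling FileSystem directly, and the fs.defaultFS address below is an assumption based on the NameNode IPC port 35925 in these entries:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteDescriptorSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumed address: localhost plus the IPC port seen in the log above.
        conf.set("fs.defaultFS", "hdfs://localhost:35925");
        try (FileSystem fs = FileSystem.get(conf);
             // create() triggers block allocation and replica placement.
             FSDataOutputStream out = fs.create(new Path("/tmp/descriptor.json"), true)) {
            out.write("{}".getBytes(StandardCharsets.UTF_8));
        } // close() completes the file on the NameNode ("DIR* completeFile").
    }
}

Because every descriptor.json here is far smaller than one block, each file produces exactly one allocate entry followed by one completeFile entry, which is the rhythm visible throughout this section.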
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,853 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744181_3357, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,857 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,862 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,862 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744182_3358, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,866 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:08,871 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,871 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744183_3359, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,875 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,879 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,880 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744184_3360, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,883 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,888 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,888 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744185_3361, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,891 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,895 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,896 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,896 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744186_3362, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,899 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:08,906 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:08,906 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744187_3363, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:08,910 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:08,914 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:08,915 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744188_3364, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,321 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,326 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,326 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744189_3365, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,330 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,334 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,334 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744190_3366, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,337 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,341 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,341 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,341 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,342 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,342 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,342 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,342 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,342 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:09,342 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,342 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744191_3367, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,345 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,349 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,349 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744192_3368, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,353 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,357 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,358 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,358 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744193_3369, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,361 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,366 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,366 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744194_3370, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,370 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,374 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,375 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744195_3371, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,382 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,386 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,387 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,387 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744196_3372, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,391 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:09,394 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,395 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744197_3373, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,398 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:09,402 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,402 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744198_3374, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,406 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,409 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,409 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,410 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,410 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744199_3375, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,413 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:09,417 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,417 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744200_3376, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,424 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,429 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,430 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744201_3377, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,434 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,437 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,437 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,437 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,438 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,438 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,438 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,438 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,438 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:09,438 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,438 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744202_3378, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,441 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,445 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,445 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744203_3379, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,448 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:09,452 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,452 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744204_3380, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,455 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,459 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,459 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744205_3381, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,462 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,466 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:09,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,467 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,467 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744206_3382, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,470 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:09,474 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,474 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744207_3383, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,478 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,481 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,481 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,481 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:09,482 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,482 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744208_3384, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,485 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:09,489 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,489 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744209_3385, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,493 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,498 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,498 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744210_3386, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,501 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,505 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,505 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,506 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,506 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,506 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,506 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,506 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:09,506 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,506 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744211_3387, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,510 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,513 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,513 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,514 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,514 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,514 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,514 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,514 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:09,514 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,514 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744212_3388, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,517 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,522 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,522 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744213_3389, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,927 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:09,931 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,932 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744214_3390, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,935 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,940 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,940 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744215_3391, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,943 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:09,948 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,948 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744216_3392, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,952 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,956 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,956 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744217_3393, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,960 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,964 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,964 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744218_3394, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,967 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:09,972 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,972 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744219_3395, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,976 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,981 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:09,981 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744220_3396, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,985 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,990 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,990 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744221_3397, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:09,994 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:09,998 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:09,999 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744222_3398, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,003 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,008 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,008 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744223_3399, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,012 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,017 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,018 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,018 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744224_3400, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,023 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,028 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,028 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,028 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,029 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,029 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744225_3401, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,033 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,039 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,039 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744226_3402, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,043 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,048 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,049 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,049 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744227_3403, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,053 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,058 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,058 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,058 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,059 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,059 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744228_3404, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,063 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,068 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,068 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744229_3405, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,073 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,077 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,077 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,078 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,078 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744230_3406, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,081 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,087 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,087 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744231_3407, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,091 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,096 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,096 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744232_3408, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,101 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,106 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,107 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744233_3409, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,111 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,115 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,116 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,116 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744234_3410, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,120 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,125 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,125 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744235_3411, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,129 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,134 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,134 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744236_3412, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,138 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,144 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,144 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744237_3413, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,148 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,153 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,153 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744238_3414, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,157 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,162 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,163 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744239_3415, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,167 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,172 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,172 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,172 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,173 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,173 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744240_3416, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,177 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,182 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,182 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744241_3417, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,187 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,192 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,192 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744242_3418, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,200 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,205 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,206 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744243_3419, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,210 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,215 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,215 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744244_3420, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,219 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,224 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,224 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,224 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,225 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,225 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744245_3421, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,229 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,234 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,234 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744246_3422, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,238 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,243 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,243 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744247_3423, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,247 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,253 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,253 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744248_3424, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,257 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,262 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,262 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744249_3425, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,267 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,272 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,272 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744250_3426, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,276 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,281 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,281 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744251_3427, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,285 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,289 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,290 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744252_3428, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,294 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,300 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,300 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744253_3429, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,304 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,309 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,309 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744254_3430, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,314 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,318 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,318 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,319 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,319 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744255_3431, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,323 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,328 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,328 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744256_3432, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,333 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,338 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,338 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744257_3433, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,342 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,347 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,347 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744258_3434, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,352 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,356 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,356 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744259_3435, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,360 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,365 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,365 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744260_3436, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,372 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,376 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,376 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744261_3437, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,381 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,385 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,385 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,386 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,386 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744262_3438, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,390 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,395 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,396 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744263_3439, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,400 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,405 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,405 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744264_3440, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,409 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,414 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,414 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744265_3441, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,419 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,424 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,424 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744266_3442, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,428 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,433 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,434 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,434 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744267_3443, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,438 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,443 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,443 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744268_3444, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,447 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,452 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,452 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744269_3445, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,456 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,461 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,461 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744270_3446, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,465 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,474 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,474 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744271_3447, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,478 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,484 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,484 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744272_3448, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,488 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,493 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,494 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744273_3449, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,498 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,506 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,506 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744274_3450, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,510 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,515 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,516 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,516 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744275_3451, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,519 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,524 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,524 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744276_3452, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,529 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,534 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,534 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744277_3453, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,538 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,542 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,543 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744278_3454, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,547 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,552 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,552 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744279_3455, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,556 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:10,561 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,561 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744280_3456, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,570 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,574 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,574 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744281_3457, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,578 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,583 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,583 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744282_3458, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,587 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,593 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,593 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:10,594 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,594 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744283_3459, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,598 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,603 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:10,603 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744284_3460, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,607 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:10,612 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,612 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744285_3461, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,619 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,623 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,624 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,624 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744286_3462, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,631 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,635 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:10,637 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:10,637 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744287_3463, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,643 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:10,648 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,648 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744288_3464, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:10,654 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:10,660 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:10,660 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744289_3465, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:10,664 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:10,670 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:10,670 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744290_3466, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:10,674 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 
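
[Editor's note] Each "BLOCK* allocate ... replicas=a,b,c" / "DIR* completeFile" pair above corresponds to one client-side create-write-close of descriptor.json: create() makes the NameNode allocate a block and a three-node replica pipeline, and close() completes the file. A minimal sketch of that call sequence against the Hadoop FileSystem API follows; the path and payload are illustrative, not the test's actual values.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import java.nio.charset.StandardCharsets;

    // One write cycle: create() -> NameNode "BLOCK* allocate" with a replica
    // pipeline; close() -> NameNode "DIR* completeFile".
    public class DescriptorWriteSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration(); // picks up fs.defaultFS
            try (FileSystem fs = FileSystem.get(conf);
                 FSDataOutputStream out = fs.create(new Path("/tmp/descriptor.json"))) {
                out.write("{}".getBytes(StandardCharsets.UTF_8));
            } // closing the stream triggers completeFile on the NameNode
        }
    }

The many near-identical pairs in this stretch of the log are the Druid segment push writing one small descriptor.json per segment, each well under a block, hence exactly one allocate per file.
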
2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:10,680 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:10,680 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744291_3467, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,071 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE) 2018-07-21T05:33:11,085 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:11,094 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,094 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744292_3468, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,097 WARN [ContainersLauncher #2] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0001_01_000008 is : 143 2018-07-21T05:33:11,099 DEBUG [ContainersLauncher #2] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #2, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:33:11,103 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
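
[Editor's note] The WARN above, "Exit code from container ... is : 143", follows the shell convention of 128 + signal number: 143 means the container's process received signal 15 (SIGTERM), i.e. the NodeManager terminated it, which is routine when a finished task's container is stopped. A tiny decoder illustrating the convention (ExitCodeSketch is a hypothetical helper, not part of YARN):

    // Decodes shell-style exit codes such as the 143 logged above:
    // values above 128 conventionally mean "killed by signal (code - 128)".
    public class ExitCodeSketch {
        static String describe(int exitCode) {
            return exitCode > 128
                ? "killed by signal " + (exitCode - 128)
                : "exited normally with status " + exitCode;
        }

        public static void main(String[] args) {
            System.out.println("143 -> " + describe(143)); // signal 15 = SIGTERM
        }
    }
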
2018-07-21T05:33:11,108 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,108 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744293_3469, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,113 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:11,118 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,118 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744294_3470, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,122 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,129 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,130 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744295_3471, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,134 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,139 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,139 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744296_3472, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,148 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:11,156 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,156 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744297_3473, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,163 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,169 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,169 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,169 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:11,169 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,169 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,170 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,170 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,170 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:11,170 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,170 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744298_3474, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,175 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,181 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,181 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744299_3475, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,190 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,194 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,195 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,195 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:11,195 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:11,195 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:11,195 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:11,195 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,195 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744300_3476, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,199 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,204 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,204 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,204 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:11,204 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,204 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:11,205 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,205 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744301_3477, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,209 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,215 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,215 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,215 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,215 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,216 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,216 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,216 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:11,216 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,216 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744302_3478, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,220 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,226 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,226 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744303_3479, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,230 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,235 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:11,236 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,236 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744304_3480, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,242 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,247 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,247 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,247 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,247 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,248 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,248 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744305_3481, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,252 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,258 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,258 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744306_3482, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,263 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,268 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,268 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,269 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,269 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,269 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,269 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,269 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:11,269 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,269 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744307_3483, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,273 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,279 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,279 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:11,280 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,280 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744308_3484, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,285 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,291 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,291 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744309_3485, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,295 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,301 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,301 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744310_3486, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,305 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:11,311 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:11,311 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744311_3487, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,316 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:11,323 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,323 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744312_3488, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,328 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:11,334 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,334 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744313_3489, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,341 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:11,347 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,347 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744314_3490, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:11,355 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:11,361 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,361 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744315_3491, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,366 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:11,371 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,371 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744316_3492, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,377 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:11,381 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,381 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744317_3493, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,388 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,393 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,393 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744318_3494, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,398 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:11,403 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,403 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744319_3495, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,408 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,413 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,413 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744320_3496, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,417 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:11,423 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,423 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744321_3497, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,428 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:11,433 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,433 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744322_3498, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,438 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:11,443 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,443 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744323_3499, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,454 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:11,460 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,460 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744324_3500, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,465 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:11,471 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,471 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744325_3501, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,477 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:11,483 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,483 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744326_3502, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,488 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,493 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:11,494 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,494 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744327_3503, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,498 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:11,504 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,504 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744328_3504, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,512 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,518 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,518 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744329_3505, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,523 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:11,529 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,529 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744330_3506, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,534 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
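Every INFO pair in this section — "BLOCK* allocate blk_..." followed a few milliseconds later by "DIR* completeFile: ... is closed by DFSClient_NONMAPREDUCE_680435605_1" — is one complete write of a small file: the DFS client asks the NameNode for a block and gets the three replica locations chosen above, streams the bytes through the datanode pipeline, and then closes the file. The same .../descriptor.json path recurs because the test apparently rewrites the segment descriptor for each segment it pushes to deep storage. A hedged client-side sketch of one such write follows; the path and payload are illustrative placeholders, not values taken from the test:

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SmallFileWriteSketch {
    public static void main(String[] args) throws Exception {
        // Loads fs.defaultFS etc. from core-site.xml/hdfs-site.xml on the classpath.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Illustrative path; the test writes one descriptor.json per Druid segment.
        Path descriptor = new Path("/tmp/deep-storage/example-segment/descriptor.json");

        // create() + the first flushed bytes drive the NameNode's block allocation
        // ("BLOCK* allocate blk_..."); close() drives "DIR* completeFile: ...".
        try (FSDataOutputStream out = fs.create(descriptor, true /* overwrite */)) {
            out.write("{\"identifier\":\"example-segment\"}".getBytes(StandardCharsets.UTF_8));
        }
        fs.close();
    }
}

Because each descriptor is far smaller than a block (128 MB by default in Hadoop 3, assuming the test does not override dfs.blocksize), every file costs exactly one allocate/completeFile round trip, which is why the two INFO lines alternate so regularly here.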
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:11,547 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,547 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744331_3507, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,552 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,558 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,558 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744332_3508, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,562 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:11,568 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,568 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744333_3509, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,573 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,578 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:11,579 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:11,579 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744334_3510, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:11,988 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:11,995 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:11,995 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744335_3511, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,000 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:12,019 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:12,019 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744336_3512, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,026 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,033 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:12,034 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:12,034 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744337_3513, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,039 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:12,045 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:12,046 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744338_3514, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,054 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:12,060 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:12,060 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744339_3515, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,067 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:12,072 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:12,072 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744340_3516, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,078 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:12,084 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:12,084 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744341_3517, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:12,490 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:12,495 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,495 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744342_3518, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,499 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,505 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,505 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,506 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:12,506 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,506 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,506 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,506 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:12,506 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,506 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744343_3519, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,510 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
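The DEBUG entries above trace HDFS replica placement inside this single-rack test cluster. For each replica the NameNode first tries to find a node outside /default-rack (the "Failed to find datanode (scope="" excludedScope="/default-rack")" and "No node to choose." pair), finds none because all four datanodes (127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625) sit on that one rack, and then falls back to a rack-local random pick that excludes nodes already holding a replica of the block. A minimal Java sketch of that exclude-aware pick, assuming uniform draws; the class and method names are hypothetical and this is not Hadoop's actual NetworkTopology code:

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.Set;

public class ChooseRandomSketch {
    private static final Random RANDOM = new Random();

    // Returns a random member of rackNodes that is not in excluded,
    // or null when nothing remains ("No node to choose.").
    static String chooseRandom(List<String> rackNodes, Set<String> excluded) {
        List<String> available = new ArrayList<>(rackNodes);
        available.removeAll(excluded);        // "Choosing random from N available nodes"
        if (available.isEmpty()) {
            return null;                      // "No node to choose."
        }
        while (true) {
            String candidate = rackNodes.get(RANDOM.nextInt(rackNodes.size()));
            if (excluded.contains(candidate)) {
                continue;                     // "Node X is excluded, continuing."
            }
            return candidate;                 // "chooseRandom returning X"
        }
    }

    public static void main(String[] args) {
        List<String> rack = List.of("127.0.0.1:40780", "127.0.0.1:52570",
                                    "127.0.0.1:33099", "127.0.0.1:45625");
        // First replica already placed on 127.0.0.1:45625, so it is excluded here.
        System.out.println(chooseRandom(rack, Set.of("127.0.0.1:45625")));
    }
}

A draw that lands on an excluded node is simply redrawn, which is why the same "is excluded, continuing." line can appear several times in a row further down in this trace.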
2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:12,515 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,515 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744344_3520, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,520 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:12,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,539 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,540 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,540 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:12,540 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,540 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744345_3521, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,544 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:12,549 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,549 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744346_3522, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,554 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,558 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,558 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,558 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:12,559 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,559 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,559 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,559 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:12,559 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,559 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744347_3523, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,563 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:12,567 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,568 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744348_3524, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,572 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:12,577 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,577 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744349_3525, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,582 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,586 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,587 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:12,587 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,587 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744350_3526, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,591 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:12,595 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,595 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744351_3527, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,599 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,606 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:12,607 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,607 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744352_3528, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,611 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:12,617 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,617 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744353_3529, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,622 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:12,627 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,627 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744354_3530, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,632 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
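The five consecutive "Node 127.0.0.1:52570 is excluded, continuing." entries above are rejection-sampling retries that keep hitting the one excluded node. Assuming each retry draws uniformly over the four datanodes, five such draws in a row have probability (1/4)^5 = 1/1024, or about 0.1%, so an occasional burst like this is expected in a trace containing hundreds of picks.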
2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:12,638 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,638 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744355_3531, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,643 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,649 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,649 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,649 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:12,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:12,650 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,650 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744356_3532, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,655 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:12,661 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:12,661 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744357_3533, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:12,666 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:12,671 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:12,671 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744358_3534, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,076 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:13,082 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,082 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744359_3535, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,086 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:13,092 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,092 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744360_3536, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,096 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:13,102 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,102 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744361_3537, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,106 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:13,111 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,112 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744362_3538, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,115 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:13,120 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,120 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744363_3539, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,125 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:13,129 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:13,129 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744364_3540, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,133 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:13,138 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,138 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744365_3541, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,142 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:13,147 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:13,147 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744366_3542, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,151 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:13,155 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,155 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744367_3543, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,159 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:13,164 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,164 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744368_3544, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,169 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,174 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,174 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744369_3545, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,178 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,184 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,184 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744370_3546, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,188 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,193 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,193 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744371_3547, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,198 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,203 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,203 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744372_3548, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,207 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,213 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,214 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744373_3549, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,215 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,215 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,218 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,218 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:33:13,219 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
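The DeletionService records interleaved above come from a NodeManager-side scheduled executor that traces every task through the beforeExecute/afterExecute hooks that java.util.concurrent.ThreadPoolExecutor exposes. Below is a minimal sketch of that instrumentation pattern; the class is illustrative, not Hadoop's actual HadoopScheduledThreadPoolExecutor/ExecutorHelper.

    import java.util.concurrent.ScheduledThreadPoolExecutor;

    // Sketch of hook-based task tracing: ThreadPoolExecutor exposes
    // beforeExecute/afterExecute as protected callbacks, so a subclass can
    // log each runnable as it enters and leaves a worker thread, which is
    // what produces the record pairs seen above.
    class TracingScheduledExecutor extends ScheduledThreadPoolExecutor {
        TracingScheduledExecutor(int corePoolSize) {
            super(corePoolSize);
        }

        @Override
        protected void beforeExecute(Thread t, Runnable r) {
            System.out.printf("beforeExecute in thread: %s, runnable type: %s%n",
                t.getName(), r.getClass().getName());
            super.beforeExecute(t, r);
        }

        @Override
        protected void afterExecute(Runnable r, Throwable failure) {
            super.afterExecute(r, failure);
            System.out.printf("afterExecute in thread: %s, runnable type: %s%n",
                Thread.currentThread().getName(), r.getClass().getName());
        }
    }

The logged runnable type, ScheduledThreadPoolExecutor$ScheduledFutureTask, is simply the wrapper that ScheduledThreadPoolExecutor puts around every scheduled task, which is why it is the same for all of these records.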
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,224 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,224 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744374_3550, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,229 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,234 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,235 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,235 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,235 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744375_3551, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,239 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,244 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,244 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,244 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,245 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,245 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,245 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,245 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,245 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,245 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744376_3552, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,249 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,254 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,254 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744377_3553, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,258 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,263 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,263 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744378_3554, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,268 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,273 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,273 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744379_3555, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,278 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,283 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,283 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744380_3556, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,287 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,291 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,291 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744381_3557, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,295 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,300 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,301 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744382_3558, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,304 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,309 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,309 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744383_3559, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,313 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,318 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,318 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744384_3560, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,324 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,330 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,330 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744385_3561, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,334 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,339 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,339 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744386_3562, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,343 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,348 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,348 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744387_3563, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,352 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,357 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,357 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744388_3564, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,362 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,366 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,367 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,367 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744389_3565, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,371 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,376 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,377 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744390_3566, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,381 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,386 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,386 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744391_3567, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,390 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
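With dozens of near-identical allocate records, the useful signal is whether the random placement stays roughly balanced across the four datanodes (127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570). Below is a hypothetical helper for tallying that from a saved copy of this log; the class name and regex are mine, not part of the test harness.

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Hypothetical helper: count how often each datanode appears in the
    // "BLOCK* allocate ... replicas=..." records, to check that the random
    // placement is spreading replicas evenly across the mini-cluster.
    public class ReplicaTally {
        private static final Pattern REPLICAS =
            Pattern.compile("BLOCK\\* allocate blk_\\S+ replicas=([0-9.:, ]+) for ");

        public static void main(String[] args) throws IOException {
            Map<String, Integer> counts = new HashMap<>();
            try (BufferedReader in = Files.newBufferedReader(Paths.get(args[0]))) {
                String line;
                while ((line = in.readLine()) != null) {
                    Matcher m = REPLICAS.matcher(line);
                    while (m.find()) { // a physical line may hold several records
                        for (String node : m.group(1).split(",\\s*")) {
                            counts.merge(node.trim(), 1, Integer::sum);
                        }
                    }
                }
            }
            counts.forEach((node, n) -> System.out.println(node + " -> " + n));
        }
    }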
2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:13,397 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,397 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744392_3568, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,402 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
2018-07-21T05:33:13,407 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,407 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744393_3569, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,412 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:13,418 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:13,418 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744394_3570, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,423 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:13,429 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,429 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744395_3571, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,434 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,440 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,440 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,440 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,440 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,440 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,440 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,441 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:13,441 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:13,441 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744396_3572, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,445 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:13,451 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:13,451 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744397_3573, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:13,455 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,460 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,460 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744398_3574, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,464 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,469 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,469 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744399_3575, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,474 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,479 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,479 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744400_3576, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,484 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,489 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,489 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744401_3577, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,494 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,499 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,499 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744402_3578, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,504 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,508 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,509 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,509 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744403_3579, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,513 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,520 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,520 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744404_3580, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,525 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,530 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,530 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744405_3581, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,534 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,547 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,547 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744406_3582, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,551 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,556 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,556 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744407_3583, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,561 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,567 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,567 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744408_3584, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,571 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,576 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,577 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,577 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,577 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,577 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,577 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,577 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,577 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744409_3585, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,581 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,586 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,586 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744410_3586, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,590 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,596 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,596 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744411_3587, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,601 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,606 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,606 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744412_3588, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,611 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,616 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,616 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744413_3589, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,620 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,624 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,624 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744414_3590, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,628 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,633 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,633 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744415_3591, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,638 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,642 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,643 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,643 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,643 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,643 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744416_3592, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,647 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,654 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,654 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744417_3593, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,658 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,664 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,664 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744418_3594, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,668 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,674 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,674 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744419_3595, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,679 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,685 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,685 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744420_3596, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,689 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:13,694 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,694 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744421_3597, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,698 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,703 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,703 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744422_3598, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,707 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,712 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,712 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744423_3599, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,717 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,722 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,722 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744424_3600, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,726 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,731 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,731 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744425_3601, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,736 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,741 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,741 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744426_3602, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,746 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,750 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,751 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,751 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744427_3603, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,756 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,761 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,761 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744428_3604, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,765 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,769 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:13,770 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,770 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744429_3605, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,774 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,778 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,778 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744430_3606, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,782 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,787 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,787 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744431_3607, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,791 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,802 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,802 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744432_3608, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,813 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:13,818 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,818 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744433_3609, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,832 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:13,838 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,838 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744434_3610, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,850 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:13,865 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,865 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744435_3611, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:13,875 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:13,880 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:13,880 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744436_3612, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,291 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,297 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,297 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744437_3613, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,301 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:14,306 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,306 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744438_3614, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,310 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,315 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,315 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744439_3615, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,319 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:14,324 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,324 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744440_3616, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,328 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:14,332 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,332 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744441_3617, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,336 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:14,341 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,341 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744442_3618, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,345 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:14,350 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,350 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744443_3619, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,354 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,359 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,359 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744444_3620, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,363 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:14,368 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,368 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744445_3621, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,372 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:14,377 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,377 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744446_3622, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,381 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:14,385 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,385 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744447_3623, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,790 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:14,795 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,796 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744448_3624, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,800 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:14,808 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,808 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744449_3625, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,818 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:14,823 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,823 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744450_3626, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,827 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:14,833 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,833 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744451_3627, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,840 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:14,845 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,845 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744452_3628, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,849 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,854 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,854 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744453_3629, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,858 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:14,862 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,863 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744454_3630, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,867 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,871 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,871 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744455_3631, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,881 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:14,886 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,886 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744456_3632, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,891 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:14,895 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:14,895 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744457_3633, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,899 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:14,904 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,904 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744458_3634, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:14,907 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:14,913 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:14,913 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744459_3635, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,318 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,324 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,324 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744460_3636, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,328 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:15,334 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,334 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744461_3637, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,343 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:15,348 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,348 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744462_3638, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,354 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,360 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,360 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744463_3639, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,367 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,379 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,379 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744464_3640, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,396 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:15,414 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,414 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744465_3641, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,427 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,435 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,435 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744466_3642, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,442 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:15,448 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,448 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744467_3643, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,454 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:15,461 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,461 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744468_3644, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,867 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:15,872 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,872 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744469_3645, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,876 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,891 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,891 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744470_3646, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,911 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,918 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,919 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,919 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744471_3647, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,923 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,927 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,927 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744472_3648, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,930 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:15,933 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,933 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744473_3649, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,937 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,940 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,940 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:15,941 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,941 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744474_3650, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,944 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:15,948 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,948 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744475_3651, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,953 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,957 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,957 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744476_3652, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,964 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,968 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,968 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,968 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:15,968 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:15,969 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:15,969 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744477_3653, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,978 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:15,982 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:15,982 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744478_3654, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:15,990 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:15,995 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:15,995 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744479_3655, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,007 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,013 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:16,014 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,014 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744480_3656, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,025 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:16,029 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,029 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744481_3657, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,034 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:16,039 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,039 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744482_3658, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,043 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,047 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:16,048 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,048 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744483_3659, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,051 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:16,054 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,055 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744484_3660, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,058 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,062 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,062 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,062 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:16,063 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,063 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,063 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,063 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:16,063 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,063 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744485_3661, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,066 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,070 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:16,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:16,071 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,071 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744486_3662, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,082 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:16,089 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,089 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744487_3663, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,098 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:16,102 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,102 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744488_3664, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,106 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,113 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:16,114 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,114 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744489_3665, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,123 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:33:16,129 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,129 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744490_3666, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,139 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:33:16,157 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,157 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744491_3667, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,564 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,569 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:16,570 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,570 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744492_3668, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,574 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:16,579 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,579 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744493_3669, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,583 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:16,589 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,589 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744494_3670, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,594 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:16,599 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,600 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744495_3671, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,604 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:33:16,612 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,612 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744496_3672, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,617 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:16,626 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,626 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744497_3673, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,632 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:33:16,637 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,637 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744498_3674, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,641 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:16,650 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,650 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744499_3675, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,660 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:16,666 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,666 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744500_3676, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,671 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:16,676 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,676 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744501_3677, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,681 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 
2018-07-21T05:33:16,689 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,690 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744502_3678, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,695 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,701 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:16,702 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,702 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744503_3679, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,707 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:33:16,712 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:16,712 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744504_3680, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:16,717 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:33:16,727 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:33:16,727 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744505_3681, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:17,131 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:17,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:17,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:17,136 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:33:17,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:33:17,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:17,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:33:17,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:33:17,137 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:33:17,137 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744506_3682, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:17,140 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:17,146 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,146 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744507_3683, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,150 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:17,154 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,154 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744508_3684, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,157 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:17,162 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,162 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744509_3685, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,166 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:17,171 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,171 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744510_3686, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,174 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,178 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:17,179 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,179 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744511_3687, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,182 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,186 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,187 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,187 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,187 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,187 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,187 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:17,187 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,187 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744512_3688, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,191 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,195 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,195 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,195 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,195 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,196 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,196 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,196 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,196 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:17,196 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,196 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744513_3689, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,199 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:17,204 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,204 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744514_3690, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,207 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:17,212 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,212 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744515_3691, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,216 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:17,220 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,220 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744516_3692, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,226 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:17,230 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,230 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744517_3693, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,234 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:17,238 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,238 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744518_3694, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,242 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:33:17,246 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,246 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744519_3695, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,249 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:17,253 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,253 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744520_3696, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,256 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:17,260 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,260 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744521_3697, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,264 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:17,268 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,268 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744522_3698, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,272 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:17,276 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,276 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744523_3699, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,282 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:33:17,293 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,293 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744524_3700, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,297 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:17,301 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,301 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744525_3701, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,305 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:17,310 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,310 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744526_3702, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,314 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625]
2018-07-21T05:33:17,319 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,319 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744527_3703, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,322 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,326 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,326 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,326 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,326 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,326 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,326 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,327 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:17,327 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,327 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744528_3704, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,330 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780]
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,334 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,334 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744529_3705, replicas=127.0.0.1:33099, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,337 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:33:17,341 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,341 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744530_3706, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,344 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:33:17,349 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,349 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744531_3707, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,352 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:33:17,356 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:33:17,356 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744532_3708, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,359 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,364 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,364 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,365 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,365 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,365 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,365 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,365 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:33:17,365 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:33:17,365 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744533_3709, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json
2018-07-21T05:33:17,368 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:33:17,372 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,372 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,373 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:33:17,373 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:33:17,373 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:33:17,373 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:33:17,373 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:33:17,373 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:33:17,373 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744534_3710, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json 2018-07-21T05:33:17,376 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/deep-storage/default.druid_max_size_partition/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_31_59.547-07_00/descriptor.json is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7 2018-07-21T05:33:17,377 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] 
druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29 2018-07-21T05:33:17,378 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46 2018-07-21T05:33:17,379 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66 2018-07-21T05:33:17,380 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88 2018-07-21T05:33:17,381 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124 2018-07-21T05:33:17,382 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18 2018-07-21T05:33:17,383 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51 2018-07-21T05:33:17,384 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76 2018-07-21T05:33:17,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77
[... 314 near-identical entries elided: druid.DruidStorageHandlerUtils on thread [ee745c13-27f8-4940-a347-c8307a2da8be main] logged "Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_N" for segments _78 through _391, timestamps 2018-07-21T05:33:17,385 through 2018-07-21T05:33:17,396 ...]
2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428 2018-07-21T05:33:17,396 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468 2018-07-21T05:33:17,397 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481 2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published 
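The identifiers in the publish run above and below follow Druid's usual segment-naming layout: dataSource, interval start, interval end, version timestamp, and a trailing shard counter, joined by underscores (one segment in the checks further down appears without a counter, which Druid uses for shard 0). A minimal, dependency-free Java sketch of composing and parsing that layout; the SegmentId class and its methods are illustrative, not Druid's or Hive's own API:

    import java.util.Arrays;

    // Illustrative only: a self-contained model of the segment identifiers
    // printed in this log, not a class from Druid or Hive.
    public class SegmentId {
        final String dataSource;
        final String intervalStart;
        final String intervalEnd;
        final String version;
        final int partitionNum;

        SegmentId(String dataSource, String intervalStart, String intervalEnd,
                  String version, int partitionNum) {
            this.dataSource = dataSource;
            this.intervalStart = intervalStart;
            this.intervalEnd = intervalEnd;
            this.version = version;
            this.partitionNum = partitionNum;
        }

        // Render the identifier exactly as the "Published ..." lines print it.
        @Override public String toString() {
            return String.join("_", dataSource, intervalStart, intervalEnd, version)
                    + "_" + partitionNum;
        }

        // The dataSource itself may contain underscores (it does here), so parse
        // by peeling the four fixed fields off the end. Handles only ids that
        // carry an explicit shard counter.
        static SegmentId parse(String id) {
            String[] p = id.split("_");
            int n = p.length;
            return new SegmentId(String.join("_", Arrays.copyOfRange(p, 0, n - 4)),
                    p[n - 4], p[n - 3], p[n - 2], Integer.parseInt(p[n - 1]));
        }

        public static void main(String[] args) {
            SegmentId id = parse("default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332");
            System.out.println(id.dataSource + " shard " + id.partitionNum);
            System.out.println(id); // round-trips to the original identifier
        }
    }

Peeling fields off the end is what makes the shard counter recoverable even though the dataSource name contains the same separator.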
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489
2018-07-21T05:33:17,398 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandlerUtils: Published default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490
2018-07-21T05:33:17,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: checking load status from coordinator localhost:8081
2018-07-21T05:33:17,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/status] starting
2018-07-21T05:33:17,622 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: DefaultHttpResponse(chunked: true) HTTP/1.1 200 OK Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Vary: Accept-Encoding, User-Agent Transfer-Encoding: chunked Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,623 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/status] Got response: 200 OK
2018-07-21T05:33:17,623 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.DefaultHttpChunk@8e391
2018-07-21T05:33:17,623 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 409B, last=false
2018-07-21T05:33:17,623 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/status] messageReceived: org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpChunk$1@44900bf
2018-07-21T05:33:17,623 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/status] Got chunk: 0B, last=true
2018-07-21T05:33:17,623 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Waiting for the loading of [634] segments
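After the publish run, the handler first confirms the coordinator is reachable (the GET http://localhost:8081/status exchange above) and then, as the [634]-segment wait shows, asks the coordinator about each segment at /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}; a 204 with an empty body (logged below as "response is []") means the segment is not loaded yet, so the check is repeated. A rough sketch of that poll using only the JDK HTTP client; the class name, retry interval, and timeout are assumptions, and only the endpoints come from the log itself:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.time.Duration;

    // Illustrative only: polls the coordinator endpoint that appears in this
    // log. Not Hive's actual DruidStorageHandler implementation.
    public class SegmentLoadCheck {
        static final HttpClient CLIENT = HttpClient.newHttpClient();

        // A still-loading segment answers 204 with an empty body (the
        // "response is []" lines); treat a non-empty 200 body as loaded.
        static boolean isLoaded(String coordinator, String dataSource, String segmentId)
                throws Exception {
            URI uri = URI.create("http://" + coordinator
                    + "/druid/coordinator/v1/datasources/" + dataSource
                    + "/segments/" + segmentId);
            HttpResponse<String> r = CLIENT.send(
                    HttpRequest.newBuilder(uri).GET().build(),
                    HttpResponse.BodyHandlers.ofString());
            return r.statusCode() == 200 && !r.body().isEmpty();
        }

        public static void main(String[] args) throws Exception {
            String segment = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139";
            long deadline = System.nanoTime() + Duration.ofMinutes(5).toNanos();
            while (!isLoaded("localhost:8081", "default.druid_max_size_partition", segment)) {
                if (System.nanoTime() > deadline)
                    throw new IllegalStateException("not loaded in time: " + segment);
                Thread.sleep(1000); // the handler repeats this check per segment
            }
            System.out.println("loaded: " + segment);
        }
    }

In the run below every check still comes back empty, which is why the handler keeps cycling through the outstanding segments.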
2018-07-21T05:33:17,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting
2018-07-21T05:33:17,634 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,634 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content
2018-07-21T05:33:17,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is []
2018-07-21T05:33:17,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting
2018-07-21T05:33:17,645 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,645 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content
2018-07-21T05:33:17,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is []
2018-07-21T05:33:17,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] starting
2018-07-21T05:33:17,738 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,738 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] Got response: 204 No Content 2018-07-21T05:33:17,739 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] response is [] 2018-07-21T05:33:17,740 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] starting 2018-07-21T05:33:17,744 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,744 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] Got response: 204 No Content 2018-07-21T05:33:17,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] response is [] 2018-07-21T05:33:17,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] starting 2018-07-21T05:33:17,752 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,753 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] Got response: 204 No Content 2018-07-21T05:33:17,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] response is [] 2018-07-21T05:33:17,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:33:17,754 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,754 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:33:17,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 2018-07-21T05:33:17,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:33:17,756 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,756 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:33:17,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 
2018-07-21T05:33:17,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:33:17,766 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,766 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:33:17,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:33:17,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] starting 2018-07-21T05:33:17,770 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,770 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] Got response: 204 No Content 2018-07-21T05:33:17,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] response is [] 2018-07-21T05:33:17,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] starting 2018-07-21T05:33:17,773 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,773 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] Got response: 204 No Content 2018-07-21T05:33:17,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] response is [] 2018-07-21T05:33:17,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:33:17,777 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,777 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:33:17,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:33:17,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:33:17,779 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,779 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:33:17,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:33:17,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] starting 2018-07-21T05:33:17,782 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,782 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] Got response: 204 No Content 2018-07-21T05:33:17,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] response is [] 2018-07-21T05:33:17,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] starting 2018-07-21T05:33:17,786 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,786 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] Got response: 204 No Content 2018-07-21T05:33:17,790 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] response is 
[] 2018-07-21T05:33:17,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] starting 2018-07-21T05:33:17,794 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,794 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] Got response: 204 No Content 2018-07-21T05:33:17,794 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] response is [] 2018-07-21T05:33:17,795 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:33:17,798 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,798 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:33:17,798 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 2018-07-21T05:33:17,800 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting 2018-07-21T05:33:17,801 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,801 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content 2018-07-21T05:33:17,802 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is [] 2018-07-21T05:33:17,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] starting 2018-07-21T05:33:17,810 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,810 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] Got response: 204 No Content 2018-07-21T05:33:17,810 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] response is [] 2018-07-21T05:33:17,810 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] starting 2018-07-21T05:33:17,811 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,811 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] Got response: 204 No Content 2018-07-21T05:33:17,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] response is [] 2018-07-21T05:33:17,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] starting 2018-07-21T05:33:17,812 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,812 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] Got response: 204 No Content 2018-07-21T05:33:17,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] response is [] 2018-07-21T05:33:17,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] starting 2018-07-21T05:33:17,813 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,813 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] Got response: 204 No Content 2018-07-21T05:33:17,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] response 
is [] 2018-07-21T05:33:17,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] starting 2018-07-21T05:33:17,814 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,814 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] Got response: 204 No Content 2018-07-21T05:33:17,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] response is [] 2018-07-21T05:33:17,815 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] starting 2018-07-21T05:33:17,816 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,816 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] Got response: 204 No Content 2018-07-21T05:33:17,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] response is [] 2018-07-21T05:33:17,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] starting 2018-07-21T05:33:17,817 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,817 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] Got response: 204 No Content 2018-07-21T05:33:17,817 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] response is [] 2018-07-21T05:33:17,817 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] starting 2018-07-21T05:33:17,818 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,818 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] Got response: 204 No Content 2018-07-21T05:33:17,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] response is [] 2018-07-21T05:33:17,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] starting 2018-07-21T05:33:17,818 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,819 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] Got response: 204 No Content 2018-07-21T05:33:17,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] response is [] 2018-07-21T05:33:17,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:33:17,821 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,821 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:33:17,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is [] 2018-07-21T05:33:17,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] starting 2018-07-21T05:33:17,824 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,824 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] Got response: 204 No Content 2018-07-21T05:33:17,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] response is [] 
2018-07-21T05:33:17,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:33:17,826 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,826 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:33:17,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:33:17,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] starting 2018-07-21T05:33:17,827 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,827 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] Got response: 204 No Content 2018-07-21T05:33:17,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] response is [] 2018-07-21T05:33:17,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] starting 2018-07-21T05:33:17,832 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,833 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] Got response: 204 No Content 2018-07-21T05:33:17,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] response is [] 2018-07-21T05:33:17,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:33:17,834 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,834 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:33:17,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 2018-07-21T05:33:17,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:33:17,834 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,834 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content
2018-07-21T05:33:17,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is []
2018-07-21T05:33:17,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting
2018-07-21T05:33:17,836 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,836 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content
2018-07-21T05:33:17,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is []
2018-07-21T05:33:17,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] starting
2018-07-21T05:33:17,837 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,837 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] Got response: 204 No Content
2018-07-21T05:33:17,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] response is []
2018-07-21T05:33:17,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting
2018-07-21T05:33:17,838 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,839 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content
2018-07-21T05:33:17,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is []
2018-07-21T05:33:17,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting
2018-07-21T05:33:17,840 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,840 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content
2018-07-21T05:33:17,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is []
2018-07-21T05:33:17,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting
2018-07-21T05:33:17,843 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,843 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content
2018-07-21T05:33:17,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is []
2018-07-21T05:33:17,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting
2018-07-21T05:33:17,849 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,849 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content
2018-07-21T05:33:17,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is []
2018-07-21T05:33:17,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting
2018-07-21T05:33:17,850 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,850 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content
2018-07-21T05:33:17,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is []
2018-07-21T05:33:17,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting
2018-07-21T05:33:17,852 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,852 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content
2018-07-21T05:33:17,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is []
2018-07-21T05:33:17,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting
2018-07-21T05:33:17,853 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,853 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content
2018-07-21T05:33:17,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is []
2018-07-21T05:33:17,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting
2018-07-21T05:33:17,854 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,854 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content
2018-07-21T05:33:17,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is []
2018-07-21T05:33:17,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] starting
2018-07-21T05:33:17,855 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,855 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] Got response: 204 No Content
2018-07-21T05:33:17,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] response is []
2018-07-21T05:33:17,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting
2018-07-21T05:33:17,856 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,856 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content
2018-07-21T05:33:17,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is []
2018-07-21T05:33:17,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] starting
2018-07-21T05:33:17,857 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,858 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] Got response: 204 No Content
2018-07-21T05:33:17,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] response is []
2018-07-21T05:33:17,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting
2018-07-21T05:33:17,859 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,859 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content
2018-07-21T05:33:17,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is []
2018-07-21T05:33:17,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting
2018-07-21T05:33:17,860 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,860 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content
2018-07-21T05:33:17,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is []
2018-07-21T05:33:17,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] starting
2018-07-21T05:33:17,861 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,861 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] Got response: 204 No Content
2018-07-21T05:33:17,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] response is []
2018-07-21T05:33:17,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] starting
2018-07-21T05:33:17,862 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,862 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] Got response: 204 No Content
2018-07-21T05:33:17,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] response is []
2018-07-21T05:33:17,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting
2018-07-21T05:33:17,863 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,863 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content
2018-07-21T05:33:17,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is []
2018-07-21T05:33:17,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] starting
2018-07-21T05:33:17,864 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,864 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] Got response: 204 No Content
2018-07-21T05:33:17,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] response is []
2018-07-21T05:33:17,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] starting
2018-07-21T05:33:17,865 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,865 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] Got response: 204 No Content
2018-07-21T05:33:17,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] response is []
2018-07-21T05:33:17,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting
2018-07-21T05:33:17,866 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,866 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content
2018-07-21T05:33:17,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is []
2018-07-21T05:33:17,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting
2018-07-21T05:33:17,867 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,867 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content
2018-07-21T05:33:17,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is []
2018-07-21T05:33:17,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting
2018-07-21T05:33:17,868 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,868 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content
2018-07-21T05:33:17,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response is []
2018-07-21T05:33:17,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] starting
2018-07-21T05:33:17,869 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,869 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] Got response: 204 No Content
2018-07-21T05:33:17,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] response is []
2018-07-21T05:33:17,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] starting
2018-07-21T05:33:17,870 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,870 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] Got response: 204 No Content
2018-07-21T05:33:17,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] response is []
2018-07-21T05:33:17,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting
2018-07-21T05:33:17,871 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,871 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content
2018-07-21T05:33:17,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is []
2018-07-21T05:33:17,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting
2018-07-21T05:33:17,872 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,872 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content
2018-07-21T05:33:17,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is []
2018-07-21T05:33:17,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting
2018-07-21T05:33:17,873 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,873 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content
2018-07-21T05:33:17,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is []
2018-07-21T05:33:17,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] starting
2018-07-21T05:33:17,874 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,874 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] Got response: 204 No Content
2018-07-21T05:33:17,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] response is []
2018-07-21T05:33:17,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] starting
2018-07-21T05:33:17,875 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,875 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] Got response: 204 No Content
2018-07-21T05:33:17,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] response is []
2018-07-21T05:33:17,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting
2018-07-21T05:33:17,876 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,876 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content
2018-07-21T05:33:17,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is []
2018-07-21T05:33:17,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting
2018-07-21T05:33:17,877 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,877 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content
2018-07-21T05:33:17,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is []
2018-07-21T05:33:17,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] starting
2018-07-21T05:33:17,879 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,879 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] Got response: 204 No Content
2018-07-21T05:33:17,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] response is []
2018-07-21T05:33:17,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] starting
2018-07-21T05:33:17,885 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,886 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] Got response: 204 No Content
2018-07-21T05:33:17,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] response is []
2018-07-21T05:33:17,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting
2018-07-21T05:33:17,887 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,887 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content
2018-07-21T05:33:17,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is []
2018-07-21T05:33:17,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting
2018-07-21T05:33:17,888 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,888 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content
2018-07-21T05:33:17,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is []
2018-07-21T05:33:17,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting
2018-07-21T05:33:17,889 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,889 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content
2018-07-21T05:33:17,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is []
2018-07-21T05:33:17,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] starting
2018-07-21T05:33:17,890 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,890 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] Got response: 204 No Content
2018-07-21T05:33:17,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] response is []
2018-07-21T05:33:17,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] starting
2018-07-21T05:33:17,891 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,891 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] Got response: 204 No Content
2018-07-21T05:33:17,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] response is []
2018-07-21T05:33:17,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting
2018-07-21T05:33:17,892 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,892 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content
2018-07-21T05:33:17,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is []
2018-07-21T05:33:17,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] starting
2018-07-21T05:33:17,895 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,895 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] Got response: 204 No Content
2018-07-21T05:33:17,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] response is []
2018-07-21T05:33:17,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] starting
2018-07-21T05:33:17,897 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,897 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] Got response: 204 No Content
2018-07-21T05:33:17,897 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] response is []
2018-07-21T05:33:17,897 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting
2018-07-21T05:33:17,898 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,898 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content
2018-07-21T05:33:17,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is []
2018-07-21T05:33:17,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] starting
2018-07-21T05:33:17,899 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,899 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] Got response: 204 No Content
2018-07-21T05:33:17,899 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] response is []
2018-07-21T05:33:17,899 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] starting
2018-07-21T05:33:17,900 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,900 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] Got response: 204 No Content
2018-07-21T05:33:17,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] response is []
2018-07-21T05:33:17,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting
2018-07-21T05:33:17,901 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,901 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content
2018-07-21T05:33:17,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is []
2018-07-21T05:33:17,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting
2018-07-21T05:33:17,902 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,902 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content
2018-07-21T05:33:17,902 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] response is []
2018-07-21T05:33:17,902 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] starting
2018-07-21T05:33:17,903 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,903 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] Got response: 204 No Content
2018-07-21T05:33:17,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] response is []
2018-07-21T05:33:17,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting
2018-07-21T05:33:17,904 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,904 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content 2018-07-21T05:33:17,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is [] 2018-07-21T05:33:17,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] starting 2018-07-21T05:33:17,905 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,905 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] Got response: 204 No Content 2018-07-21T05:33:17,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] response is [] 2018-07-21T05:33:17,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] starting 2018-07-21T05:33:17,906 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,906 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] Got response: 204 No Content 2018-07-21T05:33:17,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] response 
is [] 2018-07-21T05:33:17,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting 2018-07-21T05:33:17,907 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,907 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content 2018-07-21T05:33:17,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is [] 2018-07-21T05:33:17,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:33:17,908 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,908 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:33:17,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is [] 2018-07-21T05:33:17,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] starting 2018-07-21T05:33:17,909 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,909 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] Got response: 204 No Content 2018-07-21T05:33:17,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] response is [] 2018-07-21T05:33:17,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting 2018-07-21T05:33:17,910 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,910 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content 2018-07-21T05:33:17,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is [] 2018-07-21T05:33:17,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:33:17,911 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,911 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:33:17,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 2018-07-21T05:33:17,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] starting 2018-07-21T05:33:17,912 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,912 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] Got response: 204 No Content 2018-07-21T05:33:17,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] response is [] 2018-07-21T05:33:17,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] starting 2018-07-21T05:33:17,913 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,913 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] Got response: 204 No Content 2018-07-21T05:33:17,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] response 
is [] 2018-07-21T05:33:17,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting 2018-07-21T05:33:17,914 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,914 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content 2018-07-21T05:33:17,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is [] 2018-07-21T05:33:17,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting 2018-07-21T05:33:17,915 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,915 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content 2018-07-21T05:33:17,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is [] 2018-07-21T05:33:17,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting 2018-07-21T05:33:17,917 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,917 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content 2018-07-21T05:33:17,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is [] 2018-07-21T05:33:17,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting 2018-07-21T05:33:17,918 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,918 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content 2018-07-21T05:33:17,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is [] 2018-07-21T05:33:17,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting 2018-07-21T05:33:17,919 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,919 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content 2018-07-21T05:33:17,919 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is [] 2018-07-21T05:33:17,919 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting 2018-07-21T05:33:17,920 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,920 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content 2018-07-21T05:33:17,920 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is [] 2018-07-21T05:33:17,920 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting 2018-07-21T05:33:17,921 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,921 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content 2018-07-21T05:33:17,921 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is [] 
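The stretch above repeats a single pattern: for each shard of the default.druid_max_size_partition datasource, DruidStorageHandler issues a GET against the coordinator endpoint /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}; a 204 No Content with an empty body is logged as "response is []", meaning the coordinator does not yet report that segment. Below is a minimal sketch of such a status probe, assuming plain java.net.HttpURLConnection; SegmentProbe, COORDINATOR, and isSegmentLoaded are hypothetical names for illustration, not the actual Hive implementation.

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    // Sketch only: probes the coordinator metadata endpoint seen in the log above.
    public final class SegmentProbe {
      static final String COORDINATOR = "http://localhost:8081";

      // Returns true once the coordinator returns a non-empty descriptor for the segment.
      static boolean isSegmentLoaded(String dataSource, String segmentId) throws Exception {
        URL url = new URL(COORDINATOR + "/druid/coordinator/v1/datasources/"
            + dataSource + "/segments/" + segmentId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try {
          if (conn.getResponseCode() == 204) {
            return false; // coordinator does not know the segment yet -> "response is []"
          }
          try (InputStream in = conn.getInputStream()) {
            String body = new String(in.readAllBytes(), StandardCharsets.UTF_8);
            return !body.isEmpty() && !"[]".equals(body.trim());
          }
        } finally {
          conn.disconnect();
        }
      }
    }

The interleaved HttpClient-Netty-Worker-N lines show the requests completing on Netty worker threads while the main query thread ("ee745c13-... main") drives the checks.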
2018-07-21T05:33:17,921 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting 2018-07-21T05:33:17,922 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,922 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content 2018-07-21T05:33:17,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is [] 2018-07-21T05:33:17,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting 2018-07-21T05:33:17,924 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,924 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content 2018-07-21T05:33:17,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is [] 2018-07-21T05:33:17,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting 2018-07-21T05:33:17,925 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,925 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content 2018-07-21T05:33:17,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is [] 2018-07-21T05:33:17,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] starting 2018-07-21T05:33:17,926 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,926 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] Got response: 204 No Content 2018-07-21T05:33:17,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] response is [] 2018-07-21T05:33:17,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting 2018-07-21T05:33:17,927 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,927 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content 2018-07-21T05:33:17,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is [] 2018-07-21T05:33:17,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting 2018-07-21T05:33:17,928 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,928 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content 2018-07-21T05:33:17,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is [] 2018-07-21T05:33:17,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] starting 2018-07-21T05:33:17,929 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,929 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] Got response: 204 No Content 2018-07-21T05:33:17,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] response is [] 
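Taken together, these probes form a wait loop: the handler sweeps every shard and keeps re-checking until the coordinator reports each one, or a deadline expires. A sketch of that outer loop, reusing the hypothetical isSegmentLoaded probe above; awaitSegments and POLL_MS are likewise illustrative names, not Hive's real API.

    import java.util.List;

    // Sketch of the outer wait loop implied by the repeated probes in this log.
    final class SegmentWait {
      static final long POLL_MS = 500;

      static boolean awaitSegments(String dataSource, List<String> segmentIds,
          long timeoutMs) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
          boolean allLoaded = true;
          for (String id : segmentIds) {
            if (!SegmentProbe.isSegmentLoaded(dataSource, id)) {
              allLoaded = false; // this shard still answers 204 / "[]"
            }
          }
          if (allLoaded) {
            return true;
          }
          Thread.sleep(POLL_MS); // back off before the next sweep
        }
        return false; // timed out waiting on the coordinator
      }
    }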
2018-07-21T05:33:17,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] starting 2018-07-21T05:33:17,930 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,930 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] Got response: 204 No Content 2018-07-21T05:33:17,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] response is [] 2018-07-21T05:33:17,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] starting 2018-07-21T05:33:17,935 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,935 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] Got response: 204 No Content 2018-07-21T05:33:17,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] response is [] 2018-07-21T05:33:17,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting 2018-07-21T05:33:17,937 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,937 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content 2018-07-21T05:33:17,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is [] 2018-07-21T05:33:17,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] starting 2018-07-21T05:33:17,938 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,938 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] Got response: 204 No Content 2018-07-21T05:33:17,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] response is [] 2018-07-21T05:33:17,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] starting 2018-07-21T05:33:17,939 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,939 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] Got response: 204 No Content 2018-07-21T05:33:17,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] response is [] 2018-07-21T05:33:17,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting 2018-07-21T05:33:17,940 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,940 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content 2018-07-21T05:33:17,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is [] 2018-07-21T05:33:17,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:33:17,941 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,941 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:33:17,941 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 
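The URLs also expose the identifier convention being iterated: {dataSource}_{intervalStart}_{intervalEnd}_{version}_{partitionNumber}, e.g. interval 1970-01-01T00:00:00.000Z/1970-01-01T01:00:00.000Z with version 2018-07-21T05:31:59.547-07:00 and a running shard number. A throwaway sketch that assembles such an id; makeSegmentId is a hypothetical helper (real code would use Druid's own segment-id utilities), and the partition-0 omission reflects Druid's usual convention.

    // Sketch: assembles the underscore-delimited segment id visible in the log URLs.
    public final class SegmentIdSketch {
      static String makeSegmentId(String dataSource, String start, String end,
          String version, int partitionNum) {
        String id = String.join("_", dataSource, start, end, version);
        // Druid conventionally omits the suffix for partition 0.
        return partitionNum == 0 ? id : id + "_" + partitionNum;
      }

      public static void main(String[] args) {
        // Reproduces one identifier from the entries above.
        System.out.println(makeSegmentId("default.druid_max_size_partition",
            "1970-01-01T00:00:00.000Z", "1970-01-01T01:00:00.000Z",
            "2018-07-21T05:31:59.547-07:00", 120));
      }
    }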
2018-07-21T05:33:17,941 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting
2018-07-21T05:33:17,942 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,942 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content
2018-07-21T05:33:17,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is []
2018-07-21T05:33:17,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] starting
2018-07-21T05:33:17,943 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,943 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] Got response: 204 No Content
2018-07-21T05:33:17,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] response is []
2018-07-21T05:33:17,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] starting
2018-07-21T05:33:17,944 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,944 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] Got response: 204 No Content
2018-07-21T05:33:17,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] response is []
2018-07-21T05:33:17,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] starting
2018-07-21T05:33:17,945 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,945 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] Got response: 204 No Content
2018-07-21T05:33:17,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] response is []
2018-07-21T05:33:17,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting
2018-07-21T05:33:17,946 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,946 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content
2018-07-21T05:33:17,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is []
2018-07-21T05:33:17,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting
2018-07-21T05:33:17,947 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,947 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content
2018-07-21T05:33:17,947 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is []
2018-07-21T05:33:17,947 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting
2018-07-21T05:33:17,948 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,948 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content
2018-07-21T05:33:17,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is []
2018-07-21T05:33:17,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] starting
2018-07-21T05:33:17,949 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,949 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] Got response: 204 No Content
2018-07-21T05:33:17,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] response is []
2018-07-21T05:33:17,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] starting
2018-07-21T05:33:17,950 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,950 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] Got response: 204 No Content
2018-07-21T05:33:17,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] response is []
2018-07-21T05:33:17,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] starting
2018-07-21T05:33:17,951 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,951 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] Got response: 204 No Content
2018-07-21T05:33:17,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] response is []
2018-07-21T05:33:17,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting
2018-07-21T05:33:17,954 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,954 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content
2018-07-21T05:33:17,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is []
2018-07-21T05:33:17,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting
2018-07-21T05:33:17,955 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,955 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content
2018-07-21T05:33:17,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is []
2018-07-21T05:33:17,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] starting
2018-07-21T05:33:17,956 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,956 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] Got response: 204 No Content
2018-07-21T05:33:17,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] response is []
2018-07-21T05:33:17,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] starting
2018-07-21T05:33:17,957 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,957 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] Got response: 204 No Content
2018-07-21T05:33:17,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] response is []
2018-07-21T05:33:17,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] starting
2018-07-21T05:33:17,958 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,958 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] Got response: 204 No Content
2018-07-21T05:33:17,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] response is []
2018-07-21T05:33:17,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting
2018-07-21T05:33:17,959 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,959 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content
2018-07-21T05:33:17,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is []
2018-07-21T05:33:17,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting
2018-07-21T05:33:17,960 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,960 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content
2018-07-21T05:33:17,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is []
2018-07-21T05:33:17,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] starting
2018-07-21T05:33:17,961 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,961 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] Got response: 204 No Content
2018-07-21T05:33:17,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] response is []
2018-07-21T05:33:17,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] starting
2018-07-21T05:33:17,961 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,961 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] Got response: 204 No Content
2018-07-21T05:33:17,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] response is []
2018-07-21T05:33:17,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] starting
2018-07-21T05:33:17,962 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,962 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] Got response: 204 No Content
2018-07-21T05:33:17,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] response is []
2018-07-21T05:33:17,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting
2018-07-21T05:33:17,963 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,963 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content
2018-07-21T05:33:17,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is []
2018-07-21T05:33:17,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] starting
2018-07-21T05:33:17,964 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,964 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] Got response: 204 No Content
2018-07-21T05:33:17,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] response is []
2018-07-21T05:33:17,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] starting
2018-07-21T05:33:17,965 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,965 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] Got response: 204 No Content
2018-07-21T05:33:17,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] response is []
2018-07-21T05:33:17,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] starting
2018-07-21T05:33:17,966 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,966 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] Got response: 204 No Content
2018-07-21T05:33:17,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] response is []
2018-07-21T05:33:17,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting
2018-07-21T05:33:17,967 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,967 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content
2018-07-21T05:33:17,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is []
2018-07-21T05:33:17,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] starting
2018-07-21T05:33:17,968 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,968 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] Got response: 204 No Content
2018-07-21T05:33:17,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] response is []
2018-07-21T05:33:17,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] starting
2018-07-21T05:33:17,969 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,969 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] Got response: 204 No Content
2018-07-21T05:33:17,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] response is []
2018-07-21T05:33:17,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] starting
2018-07-21T05:33:17,970 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,970 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] Got response: 204 No Content
2018-07-21T05:33:17,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] response is []
2018-07-21T05:33:17,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting
2018-07-21T05:33:17,970 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,970 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content
2018-07-21T05:33:17,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is []
2018-07-21T05:33:17,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] starting
2018-07-21T05:33:17,971 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,971 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] Got response: 204 No Content
2018-07-21T05:33:17,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] response is []
2018-07-21T05:33:17,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] starting
2018-07-21T05:33:17,972 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,972 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] Got response: 204 No Content
2018-07-21T05:33:17,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] response is []
2018-07-21T05:33:17,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting
2018-07-21T05:33:17,973 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,973 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content
2018-07-21T05:33:17,973 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is []
2018-07-21T05:33:17,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] starting
2018-07-21T05:33:17,974 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,974 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] Got response: 204 No Content
2018-07-21T05:33:17,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] response is []
2018-07-21T05:33:17,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] starting
2018-07-21T05:33:17,975 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,975 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] Got response: 204 No Content
2018-07-21T05:33:17,975 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] response is []
2018-07-21T05:33:17,975 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] starting
2018-07-21T05:33:17,976 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,976 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] Got response: 204 No Content
2018-07-21T05:33:17,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] response is []
2018-07-21T05:33:17,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting
2018-07-21T05:33:17,977 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,977 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content
2018-07-21T05:33:17,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is []
2018-07-21T05:33:17,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting
2018-07-21T05:33:17,978 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,978 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content
2018-07-21T05:33:17,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is []
2018-07-21T05:33:17,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] starting
2018-07-21T05:33:17,979 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,979 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] Got response: 204 No Content
2018-07-21T05:33:17,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] response is []
2018-07-21T05:33:17,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] starting
2018-07-21T05:33:17,980 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,980 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] Got response: 204 No Content
2018-07-21T05:33:17,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] response is []
2018-07-21T05:33:17,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting
2018-07-21T05:33:17,981 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,981 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content
2018-07-21T05:33:17,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is []
2018-07-21T05:33:17,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting
2018-07-21T05:33:17,982 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,982 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content
2018-07-21T05:33:17,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is []
2018-07-21T05:33:17,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] starting
2018-07-21T05:33:17,983 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,983 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] Got response: 204 No Content
2018-07-21T05:33:17,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] response is []
2018-07-21T05:33:17,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] starting
2018-07-21T05:33:17,983 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,983 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] Got response: 204 No Content
2018-07-21T05:33:17,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] response is []
2018-07-21T05:33:17,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] starting
2018-07-21T05:33:17,984 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,984 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] Got response: 204 No Content
2018-07-21T05:33:17,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] response is []
2018-07-21T05:33:17,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting
2018-07-21T05:33:17,985 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,985 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content
2018-07-21T05:33:17,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is []
2018-07-21T05:33:17,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting
2018-07-21T05:33:17,986 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,986 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content
2018-07-21T05:33:17,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is []
2018-07-21T05:33:17,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting
2018-07-21T05:33:17,987 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,987 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content
2018-07-21T05:33:17,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is []
2018-07-21T05:33:17,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting
2018-07-21T05:33:17,988 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,988 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content
2018-07-21T05:33:17,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is []
2018-07-21T05:33:17,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] starting
2018-07-21T05:33:17,989 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,989 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] Got response: 204 No Content
2018-07-21T05:33:17,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] response
is [] 2018-07-21T05:33:17,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting 2018-07-21T05:33:17,990 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,990 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content 2018-07-21T05:33:17,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is [] 2018-07-21T05:33:17,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:33:17,990 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,990 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:33:17,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:33:17,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting 2018-07-21T05:33:17,991 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,991 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content 2018-07-21T05:33:17,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is [] 2018-07-21T05:33:17,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting 2018-07-21T05:33:17,992 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,992 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content 2018-07-21T05:33:17,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is [] 2018-07-21T05:33:17,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:33:17,993 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,993 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:33:17,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:33:17,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:33:17,994 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,994 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:33:17,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 2018-07-21T05:33:17,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting 2018-07-21T05:33:17,995 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:17,995 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content 2018-07-21T05:33:17,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is [] 
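Every probe in this trace follows the same four-step pattern: DruidStorageHandler issues a GET against the coordinator's per-segment metadata endpoint, NettyHttpClient receives a 204 No Content, and the handler reads the empty body ([]) as "segment not loaded yet" before moving on to the next segment. Below is a minimal sketch of that availability check, assuming only java.net; SegmentLoadCheck and isSegmentLoaded are illustrative names, not Hive's actual classes, though the endpoint path mirrors the URLs logged above.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    // Sketch of the per-segment availability probe seen in the log above.
    public class SegmentLoadCheck {
        private final String coordinatorBase; // e.g. "http://localhost:8081"

        public SegmentLoadCheck(String coordinatorBase) {
            this.coordinatorBase = coordinatorBase;
        }

        // Returns true once the coordinator serves non-empty metadata for the segment.
        public boolean isSegmentLoaded(String dataSource, String segmentId) throws Exception {
            URL url = new URL(coordinatorBase + "/druid/coordinator/v1/datasources/"
                    + dataSource + "/segments/" + segmentId);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try {
                // A 204, as in every response above, means no metadata yet.
                if (conn.getResponseCode() == HttpURLConnection.HTTP_NO_CONTENT) {
                    return false;
                }
                StringBuilder body = new StringBuilder();
                try (BufferedReader in = new BufferedReader(
                        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = in.readLine()) != null) {
                        body.append(line);
                    }
                }
                // An empty JSON body ("[]") is likewise treated as "not loaded".
                return body.length() > 0 && !"[]".equals(body.toString().trim());
            } finally {
                conn.disconnect();
            }
        }
    }

In this run every segment still reports 204/[], so the poll keeps cycling through the same segment IDs until the coordinator publishes their metadata.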
2018-07-21T05:33:17,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting
2018-07-21T05:33:17,996 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,997 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content
2018-07-21T05:33:17,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is []
2018-07-21T05:33:17,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting
2018-07-21T05:33:17,998 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,998 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content
2018-07-21T05:33:17,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is []
2018-07-21T05:33:17,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting
2018-07-21T05:33:17,999 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,999 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content
2018-07-21T05:33:17,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is []
2018-07-21T05:33:17,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting
2018-07-21T05:33:17,999 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:17 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:17,999 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content
2018-07-21T05:33:17,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is []
2018-07-21T05:33:17,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] starting
2018-07-21T05:33:18,000 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,000 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] Got response: 204 No Content
2018-07-21T05:33:18,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] response is []
2018-07-21T05:33:18,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] starting
2018-07-21T05:33:18,001 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,001 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] Got response: 204 No Content
2018-07-21T05:33:18,001 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] response is []
2018-07-21T05:33:18,001 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting
2018-07-21T05:33:18,002 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,002 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content
2018-07-21T05:33:18,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is []
2018-07-21T05:33:18,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] starting
2018-07-21T05:33:18,003 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,003 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] Got response: 204 No Content
2018-07-21T05:33:18,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] response is []
2018-07-21T05:33:18,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting
2018-07-21T05:33:18,004 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,004 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content
2018-07-21T05:33:18,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is []
2018-07-21T05:33:18,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting
2018-07-21T05:33:18,004 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,005 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content
2018-07-21T05:33:18,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is []
2018-07-21T05:33:18,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] starting
2018-07-21T05:33:18,006 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,006 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] Got response: 204 No Content
2018-07-21T05:33:18,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] response is []
2018-07-21T05:33:18,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] starting
2018-07-21T05:33:18,006 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,006 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] Got response: 204 No Content
2018-07-21T05:33:18,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] response is []
2018-07-21T05:33:18,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] starting
2018-07-21T05:33:18,007 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,007 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] Got response: 204 No Content
2018-07-21T05:33:18,008 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] response is []
2018-07-21T05:33:18,008 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting
2018-07-21T05:33:18,009 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,009 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content
2018-07-21T05:33:18,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is []
2018-07-21T05:33:18,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] starting
2018-07-21T05:33:18,010 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,010 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] Got response: 204 No Content
2018-07-21T05:33:18,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] response is []
2018-07-21T05:33:18,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] starting
2018-07-21T05:33:18,011 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,011 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] Got response: 204 No Content
2018-07-21T05:33:18,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] response is []
2018-07-21T05:33:18,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] starting
2018-07-21T05:33:18,012 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,012 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] Got response: 204 No Content
2018-07-21T05:33:18,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] response is []
2018-07-21T05:33:18,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting
2018-07-21T05:33:18,013 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,013 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content
2018-07-21T05:33:18,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is []
2018-07-21T05:33:18,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting
2018-07-21T05:33:18,014 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,014 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content
2018-07-21T05:33:18,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is []
2018-07-21T05:33:18,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] starting
2018-07-21T05:33:18,015 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,015 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] Got response: 204 No Content
2018-07-21T05:33:18,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] response is []
2018-07-21T05:33:18,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] starting
2018-07-21T05:33:18,016 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,016 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] Got response: 204 No Content
2018-07-21T05:33:18,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] response is []
2018-07-21T05:33:18,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] starting
2018-07-21T05:33:18,017 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,017 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] Got response: 204 No Content
2018-07-21T05:33:18,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] response is []
2018-07-21T05:33:18,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting
2018-07-21T05:33:18,018 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,018 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content
2018-07-21T05:33:18,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is []
2018-07-21T05:33:18,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting
2018-07-21T05:33:18,018 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,018 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content
2018-07-21T05:33:18,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is []
2018-07-21T05:33:18,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] starting
2018-07-21T05:33:18,019 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,019 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] Got response: 204 No Content
2018-07-21T05:33:18,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] response is []
2018-07-21T05:33:18,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] starting
2018-07-21T05:33:18,022 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,022 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] Got response: 204 No Content
2018-07-21T05:33:18,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] response is []
2018-07-21T05:33:18,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] starting
2018-07-21T05:33:18,023 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,023 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] Got response: 204 No Content
2018-07-21T05:33:18,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] response is []
2018-07-21T05:33:18,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting
2018-07-21T05:33:18,023 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,024 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content
2018-07-21T05:33:18,024 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is []
2018-07-21T05:33:18,024 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] starting
2018-07-21T05:33:18,024 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,024 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] Got response: 204 No Content
2018-07-21T05:33:18,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] response is []
2018-07-21T05:33:18,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] starting
2018-07-21T05:33:18,026 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,026 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] Got response: 204 No Content
2018-07-21T05:33:18,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] response is []
2018-07-21T05:33:18,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] starting
2018-07-21T05:33:18,026 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,027 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] Got response: 204 No Content
2018-07-21T05:33:18,027 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] response is []
2018-07-21T05:33:18,027 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting
2018-07-21T05:33:18,027 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,027 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content
2018-07-21T05:33:18,027 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is []
2018-07-21T05:33:18,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] starting
2018-07-21T05:33:18,028 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,028 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] Got response: 204 No Content 2018-07-21T05:33:18,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] response is [] 2018-07-21T05:33:18,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] starting 2018-07-21T05:33:18,029 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,029 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] Got response: 204 No Content 2018-07-21T05:33:18,029 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] response is [] 2018-07-21T05:33:18,029 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] starting 2018-07-21T05:33:18,030 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,030 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] Got response: 204 No Content 2018-07-21T05:33:18,030 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] response 
is [] 2018-07-21T05:33:18,030 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting 2018-07-21T05:33:18,031 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,031 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content 2018-07-21T05:33:18,031 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is [] 2018-07-21T05:33:18,031 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] starting 2018-07-21T05:33:18,031 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,031 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] Got response: 204 No Content 2018-07-21T05:33:18,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] response is [] 2018-07-21T05:33:18,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] starting 2018-07-21T05:33:18,032 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,032 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] Got response: 204 No Content 2018-07-21T05:33:18,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] response is [] 2018-07-21T05:33:18,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] starting 2018-07-21T05:33:18,033 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,033 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] Got response: 204 No Content 2018-07-21T05:33:18,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] response is [] 2018-07-21T05:33:18,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting 2018-07-21T05:33:18,034 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,034 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content 2018-07-21T05:33:18,034 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is [] 2018-07-21T05:33:18,034 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] starting 2018-07-21T05:33:18,035 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,035 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] Got response: 204 No Content 2018-07-21T05:33:18,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] response is [] 2018-07-21T05:33:18,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] starting 2018-07-21T05:33:18,035 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,035 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] Got response: 204 No Content 2018-07-21T05:33:18,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] response 
is [] 2018-07-21T05:33:18,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] starting 2018-07-21T05:33:18,036 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,036 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] Got response: 204 No Content 2018-07-21T05:33:18,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] response is [] 2018-07-21T05:33:18,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting 2018-07-21T05:33:18,037 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,037 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content 2018-07-21T05:33:18,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is [] 2018-07-21T05:33:18,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting 2018-07-21T05:33:18,038 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,038 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content 2018-07-21T05:33:18,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is [] 2018-07-21T05:33:18,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] starting 2018-07-21T05:33:18,038 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,038 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] Got response: 204 No Content 2018-07-21T05:33:18,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] response is [] 2018-07-21T05:33:18,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] starting 2018-07-21T05:33:18,039 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,039 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] Got response: 204 No Content 2018-07-21T05:33:18,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] response is [] 2018-07-21T05:33:18,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] starting 2018-07-21T05:33:18,040 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,040 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] Got response: 204 No Content 2018-07-21T05:33:18,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] response is [] 2018-07-21T05:33:18,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] starting 2018-07-21T05:33:18,041 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,041 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] Got response: 204 No Content 2018-07-21T05:33:18,041 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] 
response is [] 2018-07-21T05:33:18,041 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] starting 2018-07-21T05:33:18,042 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,042 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] Got response: 204 No Content 2018-07-21T05:33:18,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] response is [] 2018-07-21T05:33:18,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] starting 2018-07-21T05:33:18,043 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,043 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] Got response: 204 No Content 2018-07-21T05:33:18,043 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] response is [] 2018-07-21T05:33:18,043 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting 2018-07-21T05:33:18,043 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,043 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content 2018-07-21T05:33:18,043 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is [] 2018-07-21T05:33:18,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] starting 2018-07-21T05:33:18,044 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,044 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] Got response: 204 No Content 2018-07-21T05:33:18,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] response is [] 2018-07-21T05:33:18,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting 2018-07-21T05:33:18,045 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,045 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content 2018-07-21T05:33:18,045 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is [] 2018-07-21T05:33:18,045 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] starting 2018-07-21T05:33:18,046 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,046 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] Got response: 204 No Content 2018-07-21T05:33:18,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] response is [] 2018-07-21T05:33:18,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting 2018-07-21T05:33:18,047 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,047 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content 2018-07-21T05:33:18,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is [] 
2018-07-21T05:33:18,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] starting 2018-07-21T05:33:18,047 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,047 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] Got response: 204 No Content 2018-07-21T05:33:18,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] response is [] 2018-07-21T05:33:18,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting 2018-07-21T05:33:18,048 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,048 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content 2018-07-21T05:33:18,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is [] 2018-07-21T05:33:18,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] starting 2018-07-21T05:33:18,049 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,049 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] Got response: 204 No Content 2018-07-21T05:33:18,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] response is [] 2018-07-21T05:33:18,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting 2018-07-21T05:33:18,050 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,050 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content 2018-07-21T05:33:18,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is [] 2018-07-21T05:33:18,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting 2018-07-21T05:33:18,052 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,052 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content 2018-07-21T05:33:18,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is [] 2018-07-21T05:33:18,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting 2018-07-21T05:33:18,053 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,053 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content 2018-07-21T05:33:18,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is [] 2018-07-21T05:33:18,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting 2018-07-21T05:33:18,054 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,054 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content 2018-07-21T05:33:18,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is [] 
2018-07-21T05:33:18,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting
2018-07-21T05:33:18,055 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,055 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content
2018-07-21T05:33:18,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is []
2018-07-21T05:33:18,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting
2018-07-21T05:33:18,056 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,056 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content
2018-07-21T05:33:18,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is []
2018-07-21T05:33:18,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting
2018-07-21T05:33:18,056 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,056 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content
2018-07-21T05:33:18,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is []
2018-07-21T05:33:18,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] starting
2018-07-21T05:33:18,057 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,057 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] Got response: 204 No Content
2018-07-21T05:33:18,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] response is []
2018-07-21T05:33:18,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] starting
2018-07-21T05:33:18,058 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,058 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] Got response: 204 No Content
2018-07-21T05:33:18,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] response is []
2018-07-21T05:33:18,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] starting
2018-07-21T05:33:18,059 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,059 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] Got response: 204 No Content
2018-07-21T05:33:18,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] response is []
2018-07-21T05:33:18,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] starting
2018-07-21T05:33:18,064 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,064 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] Got response: 204 No Content
2018-07-21T05:33:18,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] response is []
2018-07-21T05:33:18,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] starting
2018-07-21T05:33:18,066 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,066 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] Got response: 204 No Content
2018-07-21T05:33:18,066 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] response is []
2018-07-21T05:33:18,066 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] starting
2018-07-21T05:33:18,067 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,067 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] Got response: 204 No Content
2018-07-21T05:33:18,067 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] response is []
2018-07-21T05:33:18,067 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] starting
2018-07-21T05:33:18,068 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,068 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] Got response: 204 No Content
2018-07-21T05:33:18,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] response is []
2018-07-21T05:33:18,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] starting
2018-07-21T05:33:18,069 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,069 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] Got response: 204 No Content
2018-07-21T05:33:18,069 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] response is []
2018-07-21T05:33:18,069 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] starting
2018-07-21T05:33:18,069 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,070 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] Got response: 204 No Content
2018-07-21T05:33:18,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] response is []
2018-07-21T05:33:18,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] starting
2018-07-21T05:33:18,070 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,070 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] Got response: 204 No Content
2018-07-21T05:33:18,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] response is []
2018-07-21T05:33:18,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] starting
2018-07-21T05:33:18,071 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,071 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] Got response: 204 No Content
2018-07-21T05:33:18,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] response is []
2018-07-21T05:33:18,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting
2018-07-21T05:33:18,072 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,072 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content
2018-07-21T05:33:18,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is []
2018-07-21T05:33:18,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting
2018-07-21T05:33:18,073 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,073 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content
2018-07-21T05:33:18,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is []
2018-07-21T05:33:18,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] starting
2018-07-21T05:33:18,074 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,074 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] Got response: 204 No Content
2018-07-21T05:33:18,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] response is []
2018-07-21T05:33:18,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] starting
2018-07-21T05:33:18,075 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,075 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] Got response: 204 No Content
2018-07-21T05:33:18,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] response is []
2018-07-21T05:33:18,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting
2018-07-21T05:33:18,076 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,076 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content
2018-07-21T05:33:18,076 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is []
2018-07-21T05:33:18,076 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting
2018-07-21T05:33:18,077 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,077 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content
2018-07-21T05:33:18,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is []
2018-07-21T05:33:18,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] starting
2018-07-21T05:33:18,078 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,078 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] Got response: 204 No Content
2018-07-21T05:33:18,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] response is []
2018-07-21T05:33:18,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] starting
2018-07-21T05:33:18,079 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,079 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] Got response: 204 No Content
2018-07-21T05:33:18,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] response is []
2018-07-21T05:33:18,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting
2018-07-21T05:33:18,080 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,080 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content
2018-07-21T05:33:18,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is []
2018-07-21T05:33:18,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting
2018-07-21T05:33:18,081 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,081 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content
2018-07-21T05:33:18,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is []
2018-07-21T05:33:18,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] starting
2018-07-21T05:33:18,082 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,082 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] Got response: 204 No Content
2018-07-21T05:33:18,082 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] response is []
2018-07-21T05:33:18,082 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] starting
2018-07-21T05:33:18,083 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,083 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] Got response: 204 No Content
2018-07-21T05:33:18,083 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] response is []
2018-07-21T05:33:18,083 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting
2018-07-21T05:33:18,084 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,084 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content
2018-07-21T05:33:18,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is []
2018-07-21T05:33:18,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting
2018-07-21T05:33:18,085 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,085 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content
2018-07-21T05:33:18,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is []
2018-07-21T05:33:18,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] starting
2018-07-21T05:33:18,086 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,086 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] Got response: 204 No Content
2018-07-21T05:33:18,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] response is []
2018-07-21T05:33:18,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] starting
2018-07-21T05:33:18,086 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,086 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] Got response: 204 No Content
2018-07-21T05:33:18,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] response is []
2018-07-21T05:33:18,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting
2018-07-21T05:33:18,087 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,087 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content
2018-07-21T05:33:18,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is []
2018-07-21T05:33:18,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting
2018-07-21T05:33:18,088 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,088 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content
2018-07-21T05:33:18,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is []
2018-07-21T05:33:18,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] starting
2018-07-21T05:33:18,089 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,089 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] Got response: 204 No Content
2018-07-21T05:33:18,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] response is []
2018-07-21T05:33:18,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting
2018-07-21T05:33:18,090 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,090 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content
2018-07-21T05:33:18,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is []
2018-07-21T05:33:18,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting
2018-07-21T05:33:18,090 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,090 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content
2018-07-21T05:33:18,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is []
2018-07-21T05:33:18,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting
2018-07-21T05:33:18,091 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,091 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content
2018-07-21T05:33:18,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is []
2018-07-21T05:33:18,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting
2018-07-21T05:33:18,092 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,092 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content
2018-07-21T05:33:18,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is []
2018-07-21T05:33:18,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting
2018-07-21T05:33:18,093 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,093 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content
2018-07-21T05:33:18,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is []
2018-07-21T05:33:18,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting
2018-07-21T05:33:18,094 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,094 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content
2018-07-21T05:33:18,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is []
2018-07-21T05:33:18,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting
2018-07-21T05:33:18,095 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,095 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content
2018-07-21T05:33:18,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is []
2018-07-21T05:33:18,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] starting
2018-07-21T05:33:18,096 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,096 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] Got response: 204 No Content
2018-07-21T05:33:18,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] response is []
2018-07-21T05:33:18,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] starting
2018-07-21T05:33:18,096 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,096 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] Got response: 204 No Content
2018-07-21T05:33:18,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] response is []
2018-07-21T05:33:18,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] starting
2018-07-21T05:33:18,097 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,097 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] Got response: 204 No Content
2018-07-21T05:33:18,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] response is []
2018-07-21T05:33:18,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] starting
2018-07-21T05:33:18,098 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,098 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] Got response: 204 No Content
2018-07-21T05:33:18,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] response is []
2018-07-21T05:33:18,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] starting
2018-07-21T05:33:18,099 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,099 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] Got response: 204 No Content
2018-07-21T05:33:18,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] response is []
2018-07-21T05:33:18,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] starting
2018-07-21T05:33:18,100 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,100 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] Got response: 204 No Content
2018-07-21T05:33:18,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] response is []
2018-07-21T05:33:18,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] starting
2018-07-21T05:33:18,101 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,101 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] Got response: 204 No Content
2018-07-21T05:33:18,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] response is []
2018-07-21T05:33:18,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] starting
2018-07-21T05:33:18,102 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,102 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] Got response: 204 No Content
2018-07-21T05:33:18,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] response is []
2018-07-21T05:33:18,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] starting
2018-07-21T05:33:18,103 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,103 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] Got response: 204 No Content
2018-07-21T05:33:18,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283]
response is [] 2018-07-21T05:33:18,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting 2018-07-21T05:33:18,104 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,104 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content 2018-07-21T05:33:18,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is [] 2018-07-21T05:33:18,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] starting 2018-07-21T05:33:18,105 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,105 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] Got response: 204 No Content 2018-07-21T05:33:18,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] response is [] 2018-07-21T05:33:18,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] starting 2018-07-21T05:33:18,107 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,107 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] Got response: 204 No Content 2018-07-21T05:33:18,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] response is [] 2018-07-21T05:33:18,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting 2018-07-21T05:33:18,108 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,108 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content 2018-07-21T05:33:18,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is [] 2018-07-21T05:33:18,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] starting 2018-07-21T05:33:18,109 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,109 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] Got response: 204 No Content 2018-07-21T05:33:18,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] response is [] 2018-07-21T05:33:18,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] starting 2018-07-21T05:33:18,110 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,110 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] Got response: 204 No Content 2018-07-21T05:33:18,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] response is [] 2018-07-21T05:33:18,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:33:18,111 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,111 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:33:18,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 
2018-07-21T05:33:18,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] starting 2018-07-21T05:33:18,112 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,112 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] Got response: 204 No Content 2018-07-21T05:33:18,112 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] response is [] 2018-07-21T05:33:18,112 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] starting 2018-07-21T05:33:18,113 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,113 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] Got response: 204 No Content 2018-07-21T05:33:18,113 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] response is [] 2018-07-21T05:33:18,113 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:33:18,113 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,113 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:33:18,113 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:33:18,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] starting 2018-07-21T05:33:18,114 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,114 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] Got response: 204 No Content 2018-07-21T05:33:18,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] response is [] 2018-07-21T05:33:18,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] starting 2018-07-21T05:33:18,116 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,116 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] Got response: 204 No Content 2018-07-21T05:33:18,116 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] response is [] 2018-07-21T05:33:18,116 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:33:18,117 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,117 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:33:18,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:33:18,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] starting 2018-07-21T05:33:18,118 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,118 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] Got response: 204 No Content 2018-07-21T05:33:18,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] response is [] 
2018-07-21T05:33:18,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:33:18,118 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,118 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:33:18,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 2018-07-21T05:33:18,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] starting 2018-07-21T05:33:18,119 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,119 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] Got response: 204 No Content 2018-07-21T05:33:18,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] response is [] 2018-07-21T05:33:18,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] starting 2018-07-21T05:33:18,120 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,120 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] Got response: 204 No Content 2018-07-21T05:33:18,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] response is [] 2018-07-21T05:33:18,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:33:18,121 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,121 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:33:18,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 2018-07-21T05:33:18,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:33:18,122 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,122 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:33:18,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 2018-07-21T05:33:18,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:33:18,123 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,123 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:33:18,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:33:18,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:33:18,124 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,124 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:33:18,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 
2018-07-21T05:33:18,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:33:18,125 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,125 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:33:18,125 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:33:18,125 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] starting 2018-07-21T05:33:18,126 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,126 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] Got response: 204 No Content 2018-07-21T05:33:18,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] response is [] 2018-07-21T05:33:18,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] starting 2018-07-21T05:33:18,127 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,127 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] Got response: 204 No Content 2018-07-21T05:33:18,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] response is [] 2018-07-21T05:33:18,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] starting 2018-07-21T05:33:18,127 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,127 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] Got response: 204 No Content 2018-07-21T05:33:18,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] response is [] 2018-07-21T05:33:18,127 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] starting 2018-07-21T05:33:18,128 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,128 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] Got response: 204 No Content 2018-07-21T05:33:18,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] response is [] 2018-07-21T05:33:18,128 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] starting 2018-07-21T05:33:18,129 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,129 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] Got response: 204 No Content 2018-07-21T05:33:18,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] response is [] 2018-07-21T05:33:18,129 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] starting 2018-07-21T05:33:18,130 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,130 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] Got response: 204 No Content 2018-07-21T05:33:18,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] response 
is [] 2018-07-21T05:33:18,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] starting 2018-07-21T05:33:18,131 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,131 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] Got response: 204 No Content 2018-07-21T05:33:18,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] response is [] 2018-07-21T05:33:18,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] starting 2018-07-21T05:33:18,131 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,131 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] Got response: 204 No Content 2018-07-21T05:33:18,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] response is [] 2018-07-21T05:33:18,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] starting 2018-07-21T05:33:18,132 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,132 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] Got response: 204 No Content 2018-07-21T05:33:18,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] response is [] 2018-07-21T05:33:18,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] starting 2018-07-21T05:33:18,133 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,133 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] Got response: 204 No Content 2018-07-21T05:33:18,133 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] response is [] 2018-07-21T05:33:18,133 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] starting 2018-07-21T05:33:18,134 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,134 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] Got response: 204 No Content
2018-07-21T05:33:18,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] response is []
2018-07-21T05:33:18,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] starting
2018-07-21T05:33:18,134 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,134 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] Got response: 204 No Content
2018-07-21T05:33:18,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] response is []
2018-07-21T05:33:18,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting
2018-07-21T05:33:18,135 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,135 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content
2018-07-21T05:33:18,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is []
2018-07-21T05:33:18,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] starting
2018-07-21T05:33:18,136 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,136 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] Got response: 204 No Content
2018-07-21T05:33:18,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] response is []
2018-07-21T05:33:18,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] starting
2018-07-21T05:33:18,137 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,137 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] Got response: 204 No Content
2018-07-21T05:33:18,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] response is []
2018-07-21T05:33:18,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] starting
2018-07-21T05:33:18,138 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,138 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] Got response: 204 No Content
2018-07-21T05:33:18,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] response is []
2018-07-21T05:33:18,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting
2018-07-21T05:33:18,138 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,138 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content
2018-07-21T05:33:18,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is []
2018-07-21T05:33:18,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] starting
2018-07-21T05:33:18,139 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,139 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] Got response: 204 No Content
2018-07-21T05:33:18,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] response is []
2018-07-21T05:33:18,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] starting
2018-07-21T05:33:18,140 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,140 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] Got response: 204 No Content
2018-07-21T05:33:18,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] response is []
2018-07-21T05:33:18,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] starting
2018-07-21T05:33:18,141 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,141 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] Got response: 204 No Content
2018-07-21T05:33:18,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] response is []
2018-07-21T05:33:18,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting
2018-07-21T05:33:18,142 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,142 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content
2018-07-21T05:33:18,142 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is []
2018-07-21T05:33:18,142 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] starting
2018-07-21T05:33:18,143 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,143 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] Got response: 204 No Content
2018-07-21T05:33:18,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] response is []
2018-07-21T05:33:18,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] starting
2018-07-21T05:33:18,144 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,144 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] Got response: 204 No Content
2018-07-21T05:33:18,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] response is []
2018-07-21T05:33:18,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] starting
2018-07-21T05:33:18,145 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,145 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] Got response: 204 No Content
2018-07-21T05:33:18,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] response is []
2018-07-21T05:33:18,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting
2018-07-21T05:33:18,145 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,145 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content
2018-07-21T05:33:18,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is []
2018-07-21T05:33:18,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] starting
2018-07-21T05:33:18,146 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,146 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] Got response: 204 No Content
2018-07-21T05:33:18,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] response is []
2018-07-21T05:33:18,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] starting
2018-07-21T05:33:18,147 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,147 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] Got response: 204 No Content
2018-07-21T05:33:18,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] response is []
2018-07-21T05:33:18,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] starting
2018-07-21T05:33:18,148 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,148 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] Got response: 204 No Content
2018-07-21T05:33:18,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] response is []
2018-07-21T05:33:18,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] starting
2018-07-21T05:33:18,148 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,149 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] Got response: 204 No Content
2018-07-21T05:33:18,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] response is []
2018-07-21T05:33:18,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] starting
2018-07-21T05:33:18,149 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,149 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] Got response: 204 No Content
2018-07-21T05:33:18,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] response is []
2018-07-21T05:33:18,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting
2018-07-21T05:33:18,150 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,150 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content
2018-07-21T05:33:18,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is []
2018-07-21T05:33:18,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] starting
2018-07-21T05:33:18,151 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,151 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] Got response: 204 No Content
2018-07-21T05:33:18,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] response is []
2018-07-21T05:33:18,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] starting
2018-07-21T05:33:18,152 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,152 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] Got response: 204 No Content
2018-07-21T05:33:18,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] response is []
2018-07-21T05:33:18,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] starting
2018-07-21T05:33:18,152 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,152 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] Got response: 204 No Content
2018-07-21T05:33:18,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] response is []
2018-07-21T05:33:18,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] starting
2018-07-21T05:33:18,153 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,153 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] Got response: 204 No Content
2018-07-21T05:33:18,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] response is []
2018-07-21T05:33:18,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] starting
2018-07-21T05:33:18,154 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,154 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] Got response: 204 No Content
2018-07-21T05:33:18,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] response is []
2018-07-21T05:33:18,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] starting
2018-07-21T05:33:18,154 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,154 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] Got response: 204 No Content
2018-07-21T05:33:18,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] response is []
2018-07-21T05:33:18,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] starting
2018-07-21T05:33:18,155 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,155 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] Got response: 204 No Content
2018-07-21T05:33:18,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] response is []
2018-07-21T05:33:18,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] starting
2018-07-21T05:33:18,156 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,156 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] Got response: 204 No Content
2018-07-21T05:33:18,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] response is []
2018-07-21T05:33:18,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] starting
2018-07-21T05:33:18,156 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,156 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] Got response: 204 No Content
2018-07-21T05:33:18,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] response is []
2018-07-21T05:33:18,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] starting
2018-07-21T05:33:18,157 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,157 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] Got response: 204 No Content
2018-07-21T05:33:18,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] response is []
2018-07-21T05:33:18,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] starting
2018-07-21T05:33:18,158 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,158 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] Got response: 204 No Content
2018-07-21T05:33:18,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] response is []
2018-07-21T05:33:18,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] starting
2018-07-21T05:33:18,159 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,159 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] Got response: 204 No Content
2018-07-21T05:33:18,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] response is []
2018-07-21T05:33:18,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] starting
2018-07-21T05:33:18,159 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,159 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] Got response: 204 No Content
2018-07-21T05:33:18,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] response is []
2018-07-21T05:33:18,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting
2018-07-21T05:33:18,160 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,160 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content
2018-07-21T05:33:18,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is []
2018-07-21T05:33:18,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] starting
2018-07-21T05:33:18,161 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,161 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] Got response: 204 No Content
2018-07-21T05:33:18,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] response is []
2018-07-21T05:33:18,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] starting
2018-07-21T05:33:18,162 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,162 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] Got response: 204 No Content
2018-07-21T05:33:18,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] response is []
2018-07-21T05:33:18,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] starting
2018-07-21T05:33:18,162 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,162 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] Got response: 204 No Content
2018-07-21T05:33:18,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] response is []
2018-07-21T05:33:18,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] starting
2018-07-21T05:33:18,163 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,163 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] Got response: 204 No Content
2018-07-21T05:33:18,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] response is []
2018-07-21T05:33:18,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] starting
2018-07-21T05:33:18,164 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,164 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] Got response: 204 No Content
2018-07-21T05:33:18,164 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] response is []
2018-07-21T05:33:18,164 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] starting
2018-07-21T05:33:18,165 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,165 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] Got response: 204 No Content
2018-07-21T05:33:18,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] response is []
2018-07-21T05:33:18,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] starting
2018-07-21T05:33:18,166 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,166 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] Got response: 204 No Content
2018-07-21T05:33:18,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] response is []
2018-07-21T05:33:18,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] starting
2018-07-21T05:33:18,166 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,166 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] Got response: 204 No Content
2018-07-21T05:33:18,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] response is []
2018-07-21T05:33:18,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] starting
2018-07-21T05:33:18,167 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,167 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] Got response: 204 No Content
2018-07-21T05:33:18,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] response is []
2018-07-21T05:33:18,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] starting
2018-07-21T05:33:18,168 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,168 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] Got response: 204 No Content
2018-07-21T05:33:18,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] response is []
2018-07-21T05:33:18,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] starting
2018-07-21T05:33:18,169 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,169 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] Got response: 204 No Content
2018-07-21T05:33:18,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] response is []
2018-07-21T05:33:18,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] starting
2018-07-21T05:33:18,170 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,170 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] Got response: 204 No Content
2018-07-21T05:33:18,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] response is []
2018-07-21T05:33:18,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] starting
2018-07-21T05:33:18,171 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,171 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] Got response: 204 No Content 2018-07-21T05:33:18,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] response is [] 2018-07-21T05:33:18,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] starting 2018-07-21T05:33:18,172 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,172 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] Got response: 204 No Content 2018-07-21T05:33:18,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] response is [] 2018-07-21T05:33:18,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] starting 2018-07-21T05:33:18,172 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,172 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] Got response: 204 No Content 2018-07-21T05:33:18,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] response is [] 2018-07-21T05:33:18,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] starting 2018-07-21T05:33:18,173 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,173 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] Got response: 204 No Content 2018-07-21T05:33:18,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] response is [] 2018-07-21T05:33:18,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] starting 2018-07-21T05:33:18,174 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,174 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] Got response: 204 No Content 2018-07-21T05:33:18,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] response 
is [] 2018-07-21T05:33:18,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:33:18,175 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,175 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:33:18,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:33:18,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting 2018-07-21T05:33:18,176 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,176 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content 2018-07-21T05:33:18,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is [] 2018-07-21T05:33:18,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:33:18,176 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,176 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:33:18,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 2018-07-21T05:33:18,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] starting 2018-07-21T05:33:18,177 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,177 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] Got response: 204 No Content 2018-07-21T05:33:18,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] response is [] 2018-07-21T05:33:18,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] starting 2018-07-21T05:33:18,178 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,178 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] Got response: 204 No Content 2018-07-21T05:33:18,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] response is [] 2018-07-21T05:33:18,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] starting 2018-07-21T05:33:18,179 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,179 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] Got response: 204 No Content 2018-07-21T05:33:18,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] response is [] 2018-07-21T05:33:18,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting 2018-07-21T05:33:18,180 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,180 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content 2018-07-21T05:33:18,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response 
is [] 2018-07-21T05:33:18,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:33:18,181 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,181 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:33:18,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:33:18,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting 2018-07-21T05:33:18,181 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,182 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content 2018-07-21T05:33:18,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is [] 2018-07-21T05:33:18,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:33:18,182 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,182 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:33:18,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:33:18,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting 2018-07-21T05:33:18,183 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,183 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content 2018-07-21T05:33:18,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is [] 2018-07-21T05:33:18,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting 2018-07-21T05:33:18,184 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,184 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content 2018-07-21T05:33:18,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is [] 2018-07-21T05:33:18,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:33:18,185 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,185 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:33:18,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 2018-07-21T05:33:18,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting 2018-07-21T05:33:18,186 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,186 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:33:18,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 
2018-07-21T05:33:18,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting 2018-07-21T05:33:18,186 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,186 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content 2018-07-21T05:33:18,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is [] 2018-07-21T05:33:18,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting 2018-07-21T05:33:18,187 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,187 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content 2018-07-21T05:33:18,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is [] 2018-07-21T05:33:18,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting 2018-07-21T05:33:18,188 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,188 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content 2018-07-21T05:33:18,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is [] 2018-07-21T05:33:18,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting 2018-07-21T05:33:18,189 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,189 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content 2018-07-21T05:33:18,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is [] 2018-07-21T05:33:18,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting 2018-07-21T05:33:18,190 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,190 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content 2018-07-21T05:33:18,190 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is [] 2018-07-21T05:33:18,190 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting 2018-07-21T05:33:18,191 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,191 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content 2018-07-21T05:33:18,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is [] 2018-07-21T05:33:18,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] starting 2018-07-21T05:33:18,192 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,192 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] Got response: 204 No Content 2018-07-21T05:33:18,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] response is 
[] 2018-07-21T05:33:18,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] starting 2018-07-21T05:33:18,193 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,193 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] Got response: 204 No Content 2018-07-21T05:33:18,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] response is [] 2018-07-21T05:33:18,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] starting 2018-07-21T05:33:18,194 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,194 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] Got response: 204 No Content 2018-07-21T05:33:18,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] response is [] 2018-07-21T05:33:18,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting 2018-07-21T05:33:18,194 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,195 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content 2018-07-21T05:33:18,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is [] 2018-07-21T05:33:18,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] starting 2018-07-21T05:33:18,195 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,195 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] Got response: 204 No Content 2018-07-21T05:33:18,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] response is [] 2018-07-21T05:33:18,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] starting 2018-07-21T05:33:18,196 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,196 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] Got response: 204 No Content 2018-07-21T05:33:18,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] response is [] 2018-07-21T05:33:18,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] starting 2018-07-21T05:33:18,197 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,197 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] Got response: 204 No Content 2018-07-21T05:33:18,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] response is [] 2018-07-21T05:33:18,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] starting 2018-07-21T05:33:18,198 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,198 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] Got response: 204 No Content 2018-07-21T05:33:18,198 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] response 
is [] 2018-07-21T05:33:18,198 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] starting 2018-07-21T05:33:18,199 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,199 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] Got response: 204 No Content 2018-07-21T05:33:18,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] response is [] 2018-07-21T05:33:18,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] starting 2018-07-21T05:33:18,200 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,200 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] Got response: 204 No Content 2018-07-21T05:33:18,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] response is [] 2018-07-21T05:33:18,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting 2018-07-21T05:33:18,200 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,200 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content 2018-07-21T05:33:18,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is [] 2018-07-21T05:33:18,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting 2018-07-21T05:33:18,201 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,201 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content 2018-07-21T05:33:18,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is [] 2018-07-21T05:33:18,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting 2018-07-21T05:33:18,202 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,202 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content 2018-07-21T05:33:18,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response is [] 2018-07-21T05:33:18,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting 2018-07-21T05:33:18,203 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,203 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content 2018-07-21T05:33:18,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is [] 2018-07-21T05:33:18,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting 2018-07-21T05:33:18,203 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,203 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content 2018-07-21T05:33:18,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] 
response is [] 2018-07-21T05:33:18,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting 2018-07-21T05:33:18,204 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,204 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content 2018-07-21T05:33:18,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is [] 2018-07-21T05:33:18,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting 2018-07-21T05:33:18,205 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,205 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content 2018-07-21T05:33:18,205 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is [] 2018-07-21T05:33:18,205 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting 2018-07-21T05:33:18,206 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,206 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content 2018-07-21T05:33:18,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is [] 2018-07-21T05:33:18,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] starting 2018-07-21T05:33:18,207 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,207 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] Got response: 204 No Content 2018-07-21T05:33:18,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] response is [] 2018-07-21T05:33:18,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] starting 2018-07-21T05:33:18,207 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,207 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] Got response: 204 No Content 2018-07-21T05:33:18,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] response is [] 2018-07-21T05:33:18,208 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] starting 2018-07-21T05:33:18,208 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,208 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] Got response: 204 No Content 2018-07-21T05:33:18,208 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] response is [] 2018-07-21T05:33:18,208 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] starting 2018-07-21T05:33:18,209 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,209 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] Got response: 204 No Content 2018-07-21T05:33:18,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] response 
is [] 2018-07-21T05:33:18,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] starting 2018-07-21T05:33:18,210 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,210 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] Got response: 204 No Content 2018-07-21T05:33:18,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] response is [] 2018-07-21T05:33:18,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] starting 2018-07-21T05:33:18,211 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,211 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] Got response: 204 No Content 2018-07-21T05:33:18,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] response is [] 2018-07-21T05:33:18,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:33:18,212 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,212 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:33:18,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is [] 2018-07-21T05:33:18,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] starting 2018-07-21T05:33:18,212 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,213 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] Got response: 204 No Content 2018-07-21T05:33:18,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] response is [] 2018-07-21T05:33:18,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] starting 2018-07-21T05:33:18,213 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,213 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] Got response: 204 No Content 2018-07-21T05:33:18,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] response is [] 2018-07-21T05:33:18,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] starting 2018-07-21T05:33:18,214 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,214 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] Got response: 204 No Content 2018-07-21T05:33:18,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] response is [] 2018-07-21T05:33:18,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:33:18,215 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,215 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:33:18,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response 
is [] 2018-07-21T05:33:18,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] starting 2018-07-21T05:33:18,216 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,216 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] Got response: 204 No Content 2018-07-21T05:33:18,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] response is [] 2018-07-21T05:33:18,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] starting 2018-07-21T05:33:18,216 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,216 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] Got response: 204 No Content 2018-07-21T05:33:18,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] response is [] 2018-07-21T05:33:18,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] starting 2018-07-21T05:33:18,217 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,217 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] Got response: 204 No Content 2018-07-21T05:33:18,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] response is [] 2018-07-21T05:33:18,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:33:18,218 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,218 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:33:18,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:33:18,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] starting 2018-07-21T05:33:18,219 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,219 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] Got response: 204 No Content 2018-07-21T05:33:18,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] response is [] 2018-07-21T05:33:18,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] starting 2018-07-21T05:33:18,220 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,220 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] Got response: 204 No Content 2018-07-21T05:33:18,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] response is [] 2018-07-21T05:33:18,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] starting 2018-07-21T05:33:18,221 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,221 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] Got response: 204 No Content 2018-07-21T05:33:18,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] 
response is [] 2018-07-21T05:33:18,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:33:18,222 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,222 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:33:18,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:33:18,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] starting 2018-07-21T05:33:18,223 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,223 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] Got response: 204 No Content 2018-07-21T05:33:18,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] response is [] 2018-07-21T05:33:18,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] starting 2018-07-21T05:33:18,224 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,224 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] Got response: 204 No Content 2018-07-21T05:33:18,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] response is [] 2018-07-21T05:33:18,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] starting 2018-07-21T05:33:18,225 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,225 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] Got response: 204 No Content 2018-07-21T05:33:18,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] response is [] 2018-07-21T05:33:18,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:33:18,226 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,226 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:33:18,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is [] 2018-07-21T05:33:18,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] starting 2018-07-21T05:33:18,226 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,226 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] Got response: 204 No Content 2018-07-21T05:33:18,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] response is [] 2018-07-21T05:33:18,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] starting 2018-07-21T05:33:18,227 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,227 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] Got response: 204 No Content 2018-07-21T05:33:18,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] response 
is [] 2018-07-21T05:33:18,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] starting 2018-07-21T05:33:18,228 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,228 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] Got response: 204 No Content 2018-07-21T05:33:18,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] response is [] 2018-07-21T05:33:18,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:33:18,229 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,229 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:33:18,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:33:18,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] starting 2018-07-21T05:33:18,230 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,230 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] Got response: 204 No Content 2018-07-21T05:33:18,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] response is [] 2018-07-21T05:33:18,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] starting 2018-07-21T05:33:18,231 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,231 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] Got response: 204 No Content 2018-07-21T05:33:18,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] response is [] 2018-07-21T05:33:18,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] starting 2018-07-21T05:33:18,232 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,232 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] Got response: 204 No Content 2018-07-21T05:33:18,232 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] response is [] 2018-07-21T05:33:18,232 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] starting 2018-07-21T05:33:18,232 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,232 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] Got response: 204 No Content 2018-07-21T05:33:18,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] response is [] 2018-07-21T05:33:18,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] starting 2018-07-21T05:33:18,233 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,233 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] Got response: 204 No Content 2018-07-21T05:33:18,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] response 
is [] 2018-07-21T05:33:18,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] starting 2018-07-21T05:33:18,234 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,234 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] Got response: 204 No Content 2018-07-21T05:33:18,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] response is [] 2018-07-21T05:33:18,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting 2018-07-21T05:33:18,235 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,235 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content 2018-07-21T05:33:18,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is [] 2018-07-21T05:33:18,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] starting 2018-07-21T05:33:18,235 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,235 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] Got response: 204 No Content 2018-07-21T05:33:18,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] response is [] 2018-07-21T05:33:18,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] starting 2018-07-21T05:33:18,236 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,236 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] Got response: 204 No Content 2018-07-21T05:33:18,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] response is [] 2018-07-21T05:33:18,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] starting 2018-07-21T05:33:18,237 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,237 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] Got response: 204 No Content 2018-07-21T05:33:18,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] response is [] 2018-07-21T05:33:18,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] starting 2018-07-21T05:33:18,238 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,238 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] Got response: 204 No Content 2018-07-21T05:33:18,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] response is [] 2018-07-21T05:33:18,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] starting 2018-07-21T05:33:18,239 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,239 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] Got response: 204 No Content 2018-07-21T05:33:18,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] response 
is [] 2018-07-21T05:33:18,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] starting 2018-07-21T05:33:18,240 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,240 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] Got response: 204 No Content 2018-07-21T05:33:18,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] response is [] 2018-07-21T05:33:18,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] starting 2018-07-21T05:33:18,240 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,240 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] Got response: 204 No Content 2018-07-21T05:33:18,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] response is [] 2018-07-21T05:33:18,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] starting 2018-07-21T05:33:18,241 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,241 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] Got response: 204 No Content 2018-07-21T05:33:18,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] response is [] 2018-07-21T05:33:18,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting 2018-07-21T05:33:18,242 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,242 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content 2018-07-21T05:33:18,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is [] 2018-07-21T05:33:18,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting 2018-07-21T05:33:18,243 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,243 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content 2018-07-21T05:33:18,243 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is [] 2018-07-21T05:33:18,243 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] starting 2018-07-21T05:33:18,243 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,243 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] Got response: 204 No Content 2018-07-21T05:33:18,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] response is [] 2018-07-21T05:33:18,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting 2018-07-21T05:33:18,244 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,244 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content 2018-07-21T05:33:18,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response 
is [] 2018-07-21T05:33:18,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting 2018-07-21T05:33:18,245 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,245 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content 2018-07-21T05:33:18,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is [] 2018-07-21T05:33:18,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting 2018-07-21T05:33:18,246 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,246 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content 2018-07-21T05:33:18,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is [] 2018-07-21T05:33:18,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:33:18,247 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,247 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:33:18,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is [] 2018-07-21T05:33:18,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting 2018-07-21T05:33:18,247 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,247 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content 2018-07-21T05:33:18,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is [] 2018-07-21T05:33:18,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting 2018-07-21T05:33:18,248 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,248 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content 2018-07-21T05:33:18,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is [] 2018-07-21T05:33:18,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:33:18,249 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,249 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:33:18,249 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is [] 2018-07-21T05:33:18,249 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] starting 2018-07-21T05:33:18,250 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,250 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] Got response: 204 No Content 2018-07-21T05:33:18,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] response 
is [] 2018-07-21T05:33:18,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] starting 2018-07-21T05:33:18,250 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,250 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] Got response: 204 No Content 2018-07-21T05:33:18,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] response is [] 2018-07-21T05:33:18,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] starting 2018-07-21T05:33:18,251 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,251 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] Got response: 204 No Content 2018-07-21T05:33:18,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] response is [] 2018-07-21T05:33:18,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] starting 2018-07-21T05:33:18,252 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,252 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] Got response: 204 No Content 2018-07-21T05:33:18,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] response is [] 2018-07-21T05:33:18,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] starting 2018-07-21T05:33:18,252 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,252 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] Got response: 204 No Content 2018-07-21T05:33:18,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] response is [] 2018-07-21T05:33:18,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] starting 2018-07-21T05:33:18,253 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,253 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] Got response: 204 No Content 2018-07-21T05:33:18,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] response is [] 2018-07-21T05:33:18,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] starting 2018-07-21T05:33:18,254 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,254 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] Got response: 204 No Content 2018-07-21T05:33:18,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] response is [] 2018-07-21T05:33:18,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] starting 2018-07-21T05:33:18,255 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,255 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] Got response: 204 No Content 2018-07-21T05:33:18,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] response 
is [] 2018-07-21T05:33:18,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] starting 2018-07-21T05:33:18,256 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,256 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] Got response: 204 No Content 2018-07-21T05:33:18,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] response is [] 2018-07-21T05:33:18,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] starting 2018-07-21T05:33:18,256 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,256 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] Got response: 204 No Content 2018-07-21T05:33:18,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] response is [] 2018-07-21T05:33:18,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] starting 2018-07-21T05:33:18,257 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,257 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] Got response: 204 No Content 2018-07-21T05:33:18,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] response is [] 2018-07-21T05:33:18,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] starting 2018-07-21T05:33:18,258 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,258 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] Got response: 204 No Content 2018-07-21T05:33:18,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] response is [] 2018-07-21T05:33:18,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] starting 2018-07-21T05:33:18,259 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,259 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] Got response: 204 No Content 2018-07-21T05:33:18,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] response is [] 2018-07-21T05:33:18,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] starting 2018-07-21T05:33:18,259 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,259 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] Got response: 204 No Content 2018-07-21T05:33:18,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] response is [] 2018-07-21T05:33:18,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] starting 2018-07-21T05:33:18,260 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,260 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] Got response: 204 No Content 2018-07-21T05:33:18,260 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] response 
is []
2018-07-21T05:33:18,260 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] starting
2018-07-21T05:33:18,261 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,261 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] Got response: 204 No Content
2018-07-21T05:33:18,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] response is []
[The same four-entry exchange ("[GET ...] starting", "messageReceived: ... HTTP/1.1 204 No Content ...", "Got response: 204 No Content", "Checking segment [...] response is []") repeats between 2018-07-21T05:33:18,261 and 2018-07-21T05:33:18,305 for shards _336, _457, _214, _335, _456, _316, _437, _315, _436, _318, _439, _317, _438, _112*, _113*, _319, _110*, _111*, _116*, _117*, _114*, _115*, _118*, _119*, _440, _200, _321, _442, _320, _441, _202, _323, _444, _201, _322, _443, _204, _325, _446, _203, _324, _445, _305, _426, _304, _425, _307 of datasource default.druid_max_size_partition (* = interval 1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z; all others interval 1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z, version 2018-07-21T05:31:59.547-07:00).]
2018-07-21T05:33:18,305 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] starting
2018-07-21T05:33:18,306 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:18,306 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] Got response: 204 No Content
2018-07-21T05:33:18,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428]
response is [] 2018-07-21T05:33:18,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] starting 2018-07-21T05:33:18,307 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,307 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] Got response: 204 No Content 2018-07-21T05:33:18,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] response is [] 2018-07-21T05:33:18,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] starting 2018-07-21T05:33:18,307 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,307 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] Got response: 204 No Content 2018-07-21T05:33:18,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] response is [] 2018-07-21T05:33:18,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:33:18,308 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,308 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:33:18,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is [] 2018-07-21T05:33:18,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] starting 2018-07-21T05:33:18,309 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,309 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] Got response: 204 No Content 2018-07-21T05:33:18,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] response is [] 2018-07-21T05:33:18,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:33:18,310 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,310 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:33:18,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:33:18,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] starting 2018-07-21T05:33:18,311 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,311 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] Got response: 204 No Content 2018-07-21T05:33:18,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] response is [] 2018-07-21T05:33:18,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] starting 2018-07-21T05:33:18,311 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,311 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] Got response: 204 No Content 2018-07-21T05:33:18,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] response 
is [] 2018-07-21T05:33:18,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:33:18,312 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,312 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:33:18,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:33:18,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:33:18,313 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,313 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:33:18,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is [] 2018-07-21T05:33:18,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting 2018-07-21T05:33:18,314 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,314 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content 2018-07-21T05:33:18,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is [] 2018-07-21T05:33:18,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:33:18,315 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,315 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:33:18,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:33:18,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:33:18,315 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,315 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:33:18,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:33:18,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting 2018-07-21T05:33:18,316 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,316 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content 2018-07-21T05:33:18,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is [] 2018-07-21T05:33:18,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting 2018-07-21T05:33:18,317 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,317 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content 2018-07-21T05:33:18,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response 
is [] 2018-07-21T05:33:18,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting 2018-07-21T05:33:18,318 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,318 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content 2018-07-21T05:33:18,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is [] 2018-07-21T05:33:18,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] starting 2018-07-21T05:33:18,318 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,318 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] Got response: 204 No Content 2018-07-21T05:33:18,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] response is [] 2018-07-21T05:33:18,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] starting 2018-07-21T05:33:18,319 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,319 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] Got response: 204 No Content 2018-07-21T05:33:18,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] response is [] 2018-07-21T05:33:18,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] starting 2018-07-21T05:33:18,320 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,320 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] Got response: 204 No Content 2018-07-21T05:33:18,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] response is [] 2018-07-21T05:33:18,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] starting 2018-07-21T05:33:18,321 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,321 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] Got response: 204 No Content 2018-07-21T05:33:18,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] response is [] 2018-07-21T05:33:18,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] starting 2018-07-21T05:33:18,322 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,322 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] Got response: 204 No Content 2018-07-21T05:33:18,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] response is [] 2018-07-21T05:33:18,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] starting 2018-07-21T05:33:18,322 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,322 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] Got response: 204 No Content 2018-07-21T05:33:18,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] 
response is [] 2018-07-21T05:33:18,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] starting 2018-07-21T05:33:18,323 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,323 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] Got response: 204 No Content 2018-07-21T05:33:18,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] response is [] 2018-07-21T05:33:18,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] starting 2018-07-21T05:33:18,324 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,324 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] Got response: 204 No Content 2018-07-21T05:33:18,324 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] response is [] 2018-07-21T05:33:18,324 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] starting 2018-07-21T05:33:18,325 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,325 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] Got response: 204 No Content 2018-07-21T05:33:18,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] response is [] 2018-07-21T05:33:18,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] starting 2018-07-21T05:33:18,326 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,326 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] Got response: 204 No Content 2018-07-21T05:33:18,326 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] response is [] 2018-07-21T05:33:18,326 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] starting 2018-07-21T05:33:18,327 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,327 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] Got response: 204 No Content 2018-07-21T05:33:18,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] response is [] 2018-07-21T05:33:18,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:33:18,327 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,327 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:33:18,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 2018-07-21T05:33:18,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:33:18,328 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,328 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:33:18,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 
2018-07-21T05:33:18,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:33:18,329 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,329 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:33:18,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:33:18,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:33:18,330 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,330 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:33:18,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 2018-07-21T05:33:18,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:33:18,331 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,331 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:33:18,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 2018-07-21T05:33:18,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting 2018-07-21T05:33:18,332 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,332 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content 2018-07-21T05:33:18,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is [] 2018-07-21T05:33:18,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting 2018-07-21T05:33:18,332 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,332 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content 2018-07-21T05:33:18,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is [] 2018-07-21T05:33:18,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting 2018-07-21T05:33:18,333 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,333 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content 2018-07-21T05:33:18,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is [] 2018-07-21T05:33:18,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting 2018-07-21T05:33:18,334 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,334 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content 2018-07-21T05:33:18,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is [] 
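The repeated GET / "204 No Content" / "response is []" cycle recorded above is the storage handler polling the coordinator's segment-metadata endpoint (/druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}) for each freshly pushed segment, treating an empty body as "not loaded yet". A minimal, self-contained Java sketch of that polling loop follows; only the endpoint path, the 204-means-empty behaviour, and the segment naming are taken from the log itself — the class name SegmentPollSketch, the isSegmentLoaded helper, and the one-second back-off are illustrative assumptions, not the actual DruidStorageHandler implementation.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Hypothetical sketch of the polling pattern visible in the log above;
    // not Hive's DruidStorageHandler code.
    public class SegmentPollSketch {

        // GET the coordinator's segment-metadata endpoint; per the log,
        // 204 / empty body means the segment is not yet known to the coordinator.
        static boolean isSegmentLoaded(String coordinator, String dataSource,
                                       String segmentId) throws Exception {
            URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
                    + dataSource + "/segments/" + segmentId);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            int code = conn.getResponseCode();
            if (code == 204) {
                return false; // coordinator answered, but has no metadata yet
            }
            StringBuilder body = new StringBuilder();
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream()))) {
                for (String line; (line = in.readLine()) != null; ) {
                    body.append(line);
                }
            }
            // the handler logs `response is []` and keeps polling while empty
            return code == 200 && body.length() > 0;
        }

        public static void main(String[] args) throws Exception {
            // segment id taken verbatim from one of the log entries above
            String segment = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
                    + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203";
            while (!isSegmentLoaded("http://localhost:8081",
                    "default.druid_max_size_partition", segment)) {
                Thread.sleep(1000); // assumed back-off between polls
            }
            System.out.println("segment loaded: " + segment);
        }
    }
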
2018-07-21T05:33:18,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting 2018-07-21T05:33:18,335 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,335 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content 2018-07-21T05:33:18,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is [] 2018-07-21T05:33:18,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting 2018-07-21T05:33:18,336 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,336 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content 2018-07-21T05:33:18,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is [] 2018-07-21T05:33:18,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting 2018-07-21T05:33:18,336 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,336 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content 2018-07-21T05:33:18,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is [] 2018-07-21T05:33:18,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting 2018-07-21T05:33:18,337 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,337 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content 2018-07-21T05:33:18,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is [] 2018-07-21T05:33:18,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting 2018-07-21T05:33:18,338 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,338 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content 2018-07-21T05:33:18,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is [] 2018-07-21T05:33:18,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting 2018-07-21T05:33:18,339 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,339 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content 2018-07-21T05:33:18,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is [] 2018-07-21T05:33:18,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting 2018-07-21T05:33:18,340 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,340 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content 2018-07-21T05:33:18,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is [] 
2018-07-21T05:33:18,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting 2018-07-21T05:33:18,341 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,341 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:33:18,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 2018-07-21T05:33:18,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:33:18,342 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,342 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:33:18,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 2018-07-21T05:33:18,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:33:18,343 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,343 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:33:18,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:33:18,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] starting 2018-07-21T05:33:18,343 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,343 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] Got response: 204 No Content 2018-07-21T05:33:18,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] response is [] 2018-07-21T05:33:18,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] starting 2018-07-21T05:33:18,344 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,344 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] Got response: 204 No Content 2018-07-21T05:33:18,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] response is [] 2018-07-21T05:33:18,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] starting 2018-07-21T05:33:18,345 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,345 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] Got response: 204 No Content 2018-07-21T05:33:18,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] response is [] 2018-07-21T05:33:18,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] starting 2018-07-21T05:33:18,347 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,347 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] Got response: 204 No Content 2018-07-21T05:33:18,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] response 
is [] 2018-07-21T05:33:18,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] starting 2018-07-21T05:33:18,347 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,348 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] Got response: 204 No Content 2018-07-21T05:33:18,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] response is [] 2018-07-21T05:33:18,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] starting 2018-07-21T05:33:18,348 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,348 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] Got response: 204 No Content 2018-07-21T05:33:18,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] response is [] 2018-07-21T05:33:18,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] starting 2018-07-21T05:33:18,349 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,349 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] Got response: 204 No Content 2018-07-21T05:33:18,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] response is [] 2018-07-21T05:33:18,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] starting 2018-07-21T05:33:18,350 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,350 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] Got response: 204 No Content 2018-07-21T05:33:18,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] response is [] 2018-07-21T05:33:18,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] starting 2018-07-21T05:33:18,351 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,351 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] Got response: 204 No Content 2018-07-21T05:33:18,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] response is [] 2018-07-21T05:33:18,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] starting 2018-07-21T05:33:18,351 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,351 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] Got response: 204 No Content 2018-07-21T05:33:18,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] response is [] 2018-07-21T05:33:18,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:33:18,352 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,352 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:33:18,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 
2018-07-21T05:33:18,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting 2018-07-21T05:33:18,353 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,353 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content 2018-07-21T05:33:18,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is [] 2018-07-21T05:33:18,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:33:18,354 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,354 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:33:18,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 2018-07-21T05:33:18,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:33:18,355 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,355 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:33:18,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:33:18,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:33:18,355 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,355 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:33:18,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:33:18,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:33:18,356 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,356 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:33:18,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 2018-07-21T05:33:18,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting 2018-07-21T05:33:18,357 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,357 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content 2018-07-21T05:33:18,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is [] 2018-07-21T05:33:18,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:33:18,358 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,358 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:33:18,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 
2018-07-21T05:33:18,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:33:18,359 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,359 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:33:18,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:33:18,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:33:18,359 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,359 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:33:18,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 2018-07-21T05:33:18,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:33:18,360 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,360 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:33:18,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:33:18,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:33:18,361 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,361 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:33:18,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:33:18,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting 2018-07-21T05:33:18,361 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,361 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content 2018-07-21T05:33:18,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is [] 2018-07-21T05:33:18,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:33:18,362 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,362 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:33:18,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 2018-07-21T05:33:18,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:33:18,363 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,363 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:33:18,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 
2018-07-21T05:33:18,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:33:18,363 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,363 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:33:18,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 2018-07-21T05:33:18,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:33:18,364 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,364 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:33:18,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 2018-07-21T05:33:18,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting 2018-07-21T05:33:18,365 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,365 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content 2018-07-21T05:33:18,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is [] 2018-07-21T05:33:18,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting 2018-07-21T05:33:18,366 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,366 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content 2018-07-21T05:33:18,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is [] 2018-07-21T05:33:18,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting 2018-07-21T05:33:18,367 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,367 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content 2018-07-21T05:33:18,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is [] 2018-07-21T05:33:18,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting 2018-07-21T05:33:18,368 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,368 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content 2018-07-21T05:33:18,368 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is [] 2018-07-21T05:33:18,368 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting 2018-07-21T05:33:18,369 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,369 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content 2018-07-21T05:33:18,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is [] 
2018-07-21T05:33:18,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting 2018-07-21T05:33:18,369 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,369 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content 2018-07-21T05:33:18,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is [] 2018-07-21T05:33:18,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting 2018-07-21T05:33:18,370 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,370 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content 2018-07-21T05:33:18,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is [] 2018-07-21T05:33:18,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting 2018-07-21T05:33:18,371 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,371 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content 2018-07-21T05:33:18,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is [] 2018-07-21T05:33:18,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting 2018-07-21T05:33:18,371 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,372 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content 2018-07-21T05:33:18,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is [] 2018-07-21T05:33:18,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting 2018-07-21T05:33:18,372 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,372 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content 2018-07-21T05:33:18,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is [] 2018-07-21T05:33:18,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting 2018-07-21T05:33:18,373 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,373 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content 2018-07-21T05:33:18,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is [] 2018-07-21T05:33:18,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting 2018-07-21T05:33:18,374 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,374 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content 2018-07-21T05:33:18,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is [] 
2018-07-21T05:33:18,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting 2018-07-21T05:33:18,375 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,375 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content 2018-07-21T05:33:18,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is [] 2018-07-21T05:33:18,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting 2018-07-21T05:33:18,375 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,375 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content 2018-07-21T05:33:18,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is [] 2018-07-21T05:33:18,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting 2018-07-21T05:33:18,376 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,376 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content 2018-07-21T05:33:18,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is [] 2018-07-21T05:33:18,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting 2018-07-21T05:33:18,377 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,377 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content 2018-07-21T05:33:18,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is [] 2018-07-21T05:33:18,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting 2018-07-21T05:33:18,378 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,378 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content 2018-07-21T05:33:18,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is [] 2018-07-21T05:33:18,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting 2018-07-21T05:33:18,378 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,378 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content 2018-07-21T05:33:18,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is [] 2018-07-21T05:33:18,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting 2018-07-21T05:33:18,379 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,379 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content 2018-07-21T05:33:18,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is [] 
2018-07-21T05:33:18,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting 2018-07-21T05:33:18,380 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,411 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content 2018-07-21T05:33:18,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is [] 2018-07-21T05:33:18,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting 2018-07-21T05:33:18,413 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,413 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content 2018-07-21T05:33:18,413 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is [] 2018-07-21T05:33:18,413 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting 2018-07-21T05:33:18,414 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,414 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content 2018-07-21T05:33:18,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is [] 2018-07-21T05:33:18,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting 2018-07-21T05:33:18,415 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:18,415 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content 2018-07-21T05:33:18,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is [] 2018-07-21T05:33:19,238 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:33:19,301 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:33:48,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting 2018-07-21T05:33:48,417 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 
2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,417 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content 2018-07-21T05:33:48,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is [] 2018-07-21T05:33:48,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting 2018-07-21T05:33:48,418 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,419 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content 2018-07-21T05:33:48,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is [] 2018-07-21T05:33:48,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting 2018-07-21T05:33:48,420 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,420 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content 2018-07-21T05:33:48,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is [] 2018-07-21T05:33:48,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting 2018-07-21T05:33:48,420 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,420 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content 2018-07-21T05:33:48,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is [] 2018-07-21T05:33:48,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting 2018-07-21T05:33:48,421 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,421 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content 2018-07-21T05:33:48,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is [] 2018-07-21T05:33:48,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting 2018-07-21T05:33:48,422 DEBUG 
[HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,422 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content 2018-07-21T05:33:48,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is [] 2018-07-21T05:33:48,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting 2018-07-21T05:33:48,423 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,423 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content 2018-07-21T05:33:48,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is [] 2018-07-21T05:33:48,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting 2018-07-21T05:33:48,423 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,423 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content 2018-07-21T05:33:48,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is [] 2018-07-21T05:33:48,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting 2018-07-21T05:33:48,424 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,424 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content 2018-07-21T05:33:48,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is [] 2018-07-21T05:33:48,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] starting 2018-07-21T05:33:48,425 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,425 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] Got response: 204 No Content 2018-07-21T05:33:48,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] response is [] 
2018-07-21T05:33:48,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] starting 2018-07-21T05:33:48,426 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,426 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] Got response: 204 No Content 2018-07-21T05:33:48,426 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] response is [] 2018-07-21T05:33:48,426 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting 2018-07-21T05:33:48,428 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,428 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content 2018-07-21T05:33:48,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is [] 2018-07-21T05:33:48,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] starting 2018-07-21T05:33:48,429 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,429 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] Got response: 204 No Content 2018-07-21T05:33:48,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] response is [] 2018-07-21T05:33:48,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting 2018-07-21T05:33:48,429 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,429 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content 2018-07-21T05:33:48,430 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is [] 2018-07-21T05:33:48,430 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting 2018-07-21T05:33:48,430 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,430 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content 2018-07-21T05:33:48,430 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response is [] 2018-07-21T05:33:48,430 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] starting 2018-07-21T05:33:48,431 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,431 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] Got response: 204 No Content 2018-07-21T05:33:48,431 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] response is [] 2018-07-21T05:33:48,431 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] starting 2018-07-21T05:33:48,432 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,432 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] Got response: 204 No Content 2018-07-21T05:33:48,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] response 
is [] 2018-07-21T05:33:48,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting 2018-07-21T05:33:48,433 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,433 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content 2018-07-21T05:33:48,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is [] 2018-07-21T05:33:48,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting 2018-07-21T05:33:48,434 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,434 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content 2018-07-21T05:33:48,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is [] 2018-07-21T05:33:48,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] starting 2018-07-21T05:33:48,435 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,435 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] Got response: 204 No Content 2018-07-21T05:33:48,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] response is [] 2018-07-21T05:33:48,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] starting 2018-07-21T05:33:48,435 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,435 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] Got response: 204 No Content 2018-07-21T05:33:48,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] response is [] 2018-07-21T05:33:48,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting 2018-07-21T05:33:48,436 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,436 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content 2018-07-21T05:33:48,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is [] 2018-07-21T05:33:48,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting 2018-07-21T05:33:48,437 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,437 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content 2018-07-21T05:33:48,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is [] 2018-07-21T05:33:48,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] starting 2018-07-21T05:33:48,438 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,438 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] Got response: 204 No Content 2018-07-21T05:33:48,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] response is 
[] 2018-07-21T05:33:48,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] starting 2018-07-21T05:33:48,438 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,438 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] Got response: 204 No Content 2018-07-21T05:33:48,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] response is [] 2018-07-21T05:33:48,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] starting 2018-07-21T05:33:48,439 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,439 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] Got response: 204 No Content 2018-07-21T05:33:48,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] response is [] 2018-07-21T05:33:48,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:33:48,440 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,440 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:33:48,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 2018-07-21T05:33:48,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:33:48,441 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,441 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:33:48,441 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 2018-07-21T05:33:48,441 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:33:48,442 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,442 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:33:48,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:33:48,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] starting 2018-07-21T05:33:48,442 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,442 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] Got response: 204 No Content 2018-07-21T05:33:48,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] response is [] 2018-07-21T05:33:48,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] starting 2018-07-21T05:33:48,443 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,443 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] Got response: 204 No Content 2018-07-21T05:33:48,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] response 
is [] 2018-07-21T05:33:48,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:33:48,444 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,444 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:33:48,444 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:33:48,444 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:33:48,445 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,445 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:33:48,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:33:48,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] starting 2018-07-21T05:33:48,445 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,445 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] Got response: 204 No Content 2018-07-21T05:33:48,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] response is [] 2018-07-21T05:33:48,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] starting 2018-07-21T05:33:48,446 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,446 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] Got response: 204 No Content 2018-07-21T05:33:48,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] response is [] 2018-07-21T05:33:48,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] starting 2018-07-21T05:33:48,447 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,447 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] Got response: 204 No Content 2018-07-21T05:33:48,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] response is [] 2018-07-21T05:33:48,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:33:48,448 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,448 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:33:48,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 2018-07-21T05:33:48,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting 2018-07-21T05:33:48,448 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,449 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content 2018-07-21T05:33:48,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is [] 
2018-07-21T05:33:48,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] starting 2018-07-21T05:33:48,449 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,449 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] Got response: 204 No Content 2018-07-21T05:33:48,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] response is [] 2018-07-21T05:33:48,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] starting 2018-07-21T05:33:48,450 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,450 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] Got response: 204 No Content 2018-07-21T05:33:48,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] response is [] 2018-07-21T05:33:48,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] starting 2018-07-21T05:33:48,451 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,451 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] Got response: 204 No Content 2018-07-21T05:33:48,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] response is [] 2018-07-21T05:33:48,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] starting 2018-07-21T05:33:48,451 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,451 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] Got response: 204 No Content 2018-07-21T05:33:48,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] response is [] 2018-07-21T05:33:48,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] starting 2018-07-21T05:33:48,452 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,452 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] Got response: 204 No Content 2018-07-21T05:33:48,452 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] response is [] 2018-07-21T05:33:48,452 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] starting 2018-07-21T05:33:48,453 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,453 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] Got response: 204 No Content 2018-07-21T05:33:48,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] response is [] 2018-07-21T05:33:48,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] starting 2018-07-21T05:33:48,454 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,454 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] Got response: 204 No Content 2018-07-21T05:33:48,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] 
response is [] 2018-07-21T05:33:48,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] starting 2018-07-21T05:33:48,454 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,454 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] Got response: 204 No Content 2018-07-21T05:33:48,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] response is [] 2018-07-21T05:33:48,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] starting 2018-07-21T05:33:48,455 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,455 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] Got response: 204 No Content 2018-07-21T05:33:48,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] response is [] 2018-07-21T05:33:48,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:33:48,456 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,456 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:33:48,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is [] 2018-07-21T05:33:48,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] starting 2018-07-21T05:33:48,457 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,457 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] Got response: 204 No Content 2018-07-21T05:33:48,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] response is [] 2018-07-21T05:33:48,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:33:48,458 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,458 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:33:48,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:33:48,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] starting 2018-07-21T05:33:48,458 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,458 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] Got response: 204 No Content 2018-07-21T05:33:48,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] response is [] 2018-07-21T05:33:48,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] starting 2018-07-21T05:33:48,459 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,459 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] Got response: 204 No Content 2018-07-21T05:33:48,459 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] response is 
[] 2018-07-21T05:33:48,459 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:33:48,460 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,460 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:33:48,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 2018-07-21T05:33:48,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:33:48,461 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,461 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:33:48,461 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is [] 2018-07-21T05:33:48,461 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:33:48,462 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,462 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:33:48,462 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is [] 2018-07-21T05:33:48,462 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] starting 2018-07-21T05:33:48,462 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,462 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] Got response: 204 No Content 2018-07-21T05:33:48,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] response is [] 2018-07-21T05:33:48,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting 2018-07-21T05:33:48,463 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,463 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content 2018-07-21T05:33:48,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is [] 2018-07-21T05:33:48,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:33:48,464 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,464 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:33:48,464 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 2018-07-21T05:33:48,464 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting 2018-07-21T05:33:48,465 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,465 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content 2018-07-21T05:33:48,465 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is [] 
2018-07-21T05:33:48,465 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting
2018-07-21T05:33:48,466 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,466 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content
2018-07-21T05:33:48,466 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is []
2018-07-21T05:33:48,466 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting
2018-07-21T05:33:48,466 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,466 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content
2018-07-21T05:33:48,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is []
2018-07-21T05:33:48,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting
2018-07-21T05:33:48,467 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,467 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content
2018-07-21T05:33:48,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is []
2018-07-21T05:33:48,467 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting
2018-07-21T05:33:48,468 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,468 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content
2018-07-21T05:33:48,468 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is []
2018-07-21T05:33:48,468 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting
2018-07-21T05:33:48,469 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,469 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content
2018-07-21T05:33:48,469 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is []
2018-07-21T05:33:48,469 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] starting
2018-07-21T05:33:48,470 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,470 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] Got response: 204 No Content
2018-07-21T05:33:48,470 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] response is []
2018-07-21T05:33:48,470 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting
2018-07-21T05:33:48,471 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,471 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content
2018-07-21T05:33:48,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is []
2018-07-21T05:33:48,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] starting
2018-07-21T05:33:48,471 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,471 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] Got response: 204 No Content
2018-07-21T05:33:48,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] response is []
2018-07-21T05:33:48,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting
2018-07-21T05:33:48,472 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,472 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content
2018-07-21T05:33:48,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is []
2018-07-21T05:33:48,472 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting
2018-07-21T05:33:48,473 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,473 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content
2018-07-21T05:33:48,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is []
2018-07-21T05:33:48,473 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] starting
2018-07-21T05:33:48,474 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,474 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] Got response: 204 No Content
2018-07-21T05:33:48,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] response is []
2018-07-21T05:33:48,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] starting
2018-07-21T05:33:48,474 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,474 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] Got response: 204 No Content
2018-07-21T05:33:48,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] response is []
2018-07-21T05:33:48,474 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting
2018-07-21T05:33:48,475 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,475 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content
2018-07-21T05:33:48,475 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is []
2018-07-21T05:33:48,475 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] starting
2018-07-21T05:33:48,476 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,476 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] Got response: 204 No Content
2018-07-21T05:33:48,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] response is []
2018-07-21T05:33:48,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] starting
2018-07-21T05:33:48,476 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,476 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] Got response: 204 No Content
2018-07-21T05:33:48,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] response is []
2018-07-21T05:33:48,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting
2018-07-21T05:33:48,477 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,477 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content
2018-07-21T05:33:48,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is []
2018-07-21T05:33:48,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting
2018-07-21T05:33:48,478 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,478 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content
2018-07-21T05:33:48,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is []
2018-07-21T05:33:48,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting
2018-07-21T05:33:48,479 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,479 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content
2018-07-21T05:33:48,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response is []
2018-07-21T05:33:48,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] starting
2018-07-21T05:33:48,480 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,480 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] Got response: 204 No Content
2018-07-21T05:33:48,480 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] response is []
2018-07-21T05:33:48,480 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] starting
2018-07-21T05:33:48,481 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,481 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] Got response: 204 No Content
2018-07-21T05:33:48,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] response is []
2018-07-21T05:33:48,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting
2018-07-21T05:33:48,481 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,481 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content
2018-07-21T05:33:48,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is []
2018-07-21T05:33:48,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting
2018-07-21T05:33:48,482 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,482 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content
2018-07-21T05:33:48,482 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is []
2018-07-21T05:33:48,482 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting
2018-07-21T05:33:48,482 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,482 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content
2018-07-21T05:33:48,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is []
2018-07-21T05:33:48,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] starting
2018-07-21T05:33:48,483 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,483 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] Got response: 204 No Content
2018-07-21T05:33:48,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] response is []
2018-07-21T05:33:48,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] starting
2018-07-21T05:33:48,484 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,484 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] Got response: 204 No Content
2018-07-21T05:33:48,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] response is []
2018-07-21T05:33:48,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting
2018-07-21T05:33:48,485 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,485 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content
2018-07-21T05:33:48,485 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is []
2018-07-21T05:33:48,485 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting
2018-07-21T05:33:48,485 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,485 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content
2018-07-21T05:33:48,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is []
2018-07-21T05:33:48,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] starting
2018-07-21T05:33:48,486 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,486 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] Got response: 204 No Content
2018-07-21T05:33:48,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] response is []
2018-07-21T05:33:48,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] starting
2018-07-21T05:33:48,487 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,487 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] Got response: 204 No Content
2018-07-21T05:33:48,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] response is []
2018-07-21T05:33:48,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting
2018-07-21T05:33:48,488 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,488 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content
2018-07-21T05:33:48,488 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is []
2018-07-21T05:33:48,488 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting
2018-07-21T05:33:48,489 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,489 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content
2018-07-21T05:33:48,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is []
2018-07-21T05:33:48,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting
2018-07-21T05:33:48,489 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,489 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content
2018-07-21T05:33:48,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is []
2018-07-21T05:33:48,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] starting
2018-07-21T05:33:48,490 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,490 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] Got response: 204 No Content
2018-07-21T05:33:48,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] response is []
2018-07-21T05:33:48,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] starting
2018-07-21T05:33:48,491 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,491 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] Got response: 204 No Content
2018-07-21T05:33:48,491 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] response is []
2018-07-21T05:33:48,491 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting
2018-07-21T05:33:48,492 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,492 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content
2018-07-21T05:33:48,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is []
2018-07-21T05:33:48,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] starting
2018-07-21T05:33:48,492 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,492 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] Got response: 204 No Content
2018-07-21T05:33:48,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] response is []
2018-07-21T05:33:48,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] starting
2018-07-21T05:33:48,493 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,493 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] Got response: 204 No Content
2018-07-21T05:33:48,493 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] response is []
2018-07-21T05:33:48,493 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting
2018-07-21T05:33:48,494 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,494 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content
2018-07-21T05:33:48,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is []
2018-07-21T05:33:48,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] starting
2018-07-21T05:33:48,495 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,495 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] Got response: 204 No Content
2018-07-21T05:33:48,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] response is []
2018-07-21T05:33:48,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] starting
2018-07-21T05:33:48,495 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,495 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] Got response: 204 No Content
2018-07-21T05:33:48,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] response is []
2018-07-21T05:33:48,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting
2018-07-21T05:33:48,496 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,496 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content
2018-07-21T05:33:48,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is []
2018-07-21T05:33:48,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting
2018-07-21T05:33:48,497 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,497 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content
2018-07-21T05:33:48,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] response is []
2018-07-21T05:33:48,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] starting
2018-07-21T05:33:48,497 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,497 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] Got response: 204 No Content
2018-07-21T05:33:48,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] response is []
2018-07-21T05:33:48,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting
2018-07-21T05:33:48,498 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,498 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content
2018-07-21T05:33:48,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is []
2018-07-21T05:33:48,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] starting
2018-07-21T05:33:48,499 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,499 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] Got response: 204 No Content
2018-07-21T05:33:48,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] response is []
2018-07-21T05:33:48,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] starting
2018-07-21T05:33:48,500 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,500 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] Got response: 204 No Content
2018-07-21T05:33:48,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] response is []
2018-07-21T05:33:48,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting
2018-07-21T05:33:48,501 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,501 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content
2018-07-21T05:33:48,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is []
2018-07-21T05:33:48,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting
2018-07-21T05:33:48,502 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,502 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content
2018-07-21T05:33:48,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is []
2018-07-21T05:33:48,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] starting
2018-07-21T05:33:48,502 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,502 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] Got response: 204 No Content
2018-07-21T05:33:48,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] response is []
2018-07-21T05:33:48,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting
2018-07-21T05:33:48,503 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,503 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content
2018-07-21T05:33:48,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is []
2018-07-21T05:33:48,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting
2018-07-21T05:33:48,504 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,504 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content
2018-07-21T05:33:48,504 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is []
2018-07-21T05:33:48,504 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] starting
2018-07-21T05:33:48,505 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,505 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] Got response: 204 No Content
2018-07-21T05:33:48,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] response is []
2018-07-21T05:33:48,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] starting
2018-07-21T05:33:48,505 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,505 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] Got response: 204 No Content
2018-07-21T05:33:48,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] response is []
2018-07-21T05:33:48,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting
2018-07-21T05:33:48,506 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,506 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content
2018-07-21T05:33:48,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is []
2018-07-21T05:33:48,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting
2018-07-21T05:33:48,507 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,507 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content
2018-07-21T05:33:48,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is []
2018-07-21T05:33:48,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting
2018-07-21T05:33:48,507 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,507 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content
2018-07-21T05:33:48,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is []
2018-07-21T05:33:48,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting
2018-07-21T05:33:48,508 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,508 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content
2018-07-21T05:33:48,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is []
2018-07-21T05:33:48,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting
2018-07-21T05:33:48,509 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,509 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content
2018-07-21T05:33:48,509 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is []
2018-07-21T05:33:48,509 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting
2018-07-21T05:33:48,510 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,510 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content
2018-07-21T05:33:48,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is []
2018-07-21T05:33:48,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting
2018-07-21T05:33:48,510 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,511 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content
2018-07-21T05:33:48,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is []
2018-07-21T05:33:48,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting
2018-07-21T05:33:48,511 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,511 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content
2018-07-21T05:33:48,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is []
2018-07-21T05:33:48,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting
2018-07-21T05:33:48,512 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,512 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content
2018-07-21T05:33:48,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is []
2018-07-21T05:33:48,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting
2018-07-21T05:33:48,513 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,513 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content
2018-07-21T05:33:48,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is []
2018-07-21T05:33:48,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] starting
2018-07-21T05:33:48,513 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,513 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] Got response: 204 No Content
2018-07-21T05:33:48,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] response is []
2018-07-21T05:33:48,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting
2018-07-21T05:33:48,514 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,514 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content
2018-07-21T05:33:48,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is []
2018-07-21T05:33:48,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting
2018-07-21T05:33:48,515 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,515 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content
2018-07-21T05:33:48,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is []
2018-07-21T05:33:48,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] starting
2018-07-21T05:33:48,516 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,516 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] Got response: 204 No Content
2018-07-21T05:33:48,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] response is []
2018-07-21T05:33:48,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] starting
2018-07-21T05:33:48,517 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,517 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] Got response: 204 No Content
2018-07-21T05:33:48,517 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] response is []
2018-07-21T05:33:48,517 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] starting
2018-07-21T05:33:48,517 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,517 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] Got response: 204 No Content
2018-07-21T05:33:48,517 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] response is []
2018-07-21T05:33:48,517 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting
2018-07-21T05:33:48,518 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,518 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content
2018-07-21T05:33:48,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is []
2018-07-21T05:33:48,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] starting
2018-07-21T05:33:48,519 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,519 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] Got response: 204 No Content
2018-07-21T05:33:48,519 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] response is []
2018-07-21T05:33:48,519 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] starting
2018-07-21T05:33:48,520 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,520 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] Got response: 204 No Content
2018-07-21T05:33:48,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] response is []
2018-07-21T05:33:48,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting
2018-07-21T05:33:48,520 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,520 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content
2018-07-21T05:33:48,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is []
2018-07-21T05:33:48,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting
2018-07-21T05:33:48,521 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,521 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content
2018-07-21T05:33:48,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is []
2018-07-21T05:33:48,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting
2018-07-21T05:33:48,522 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,522 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content
2018-07-21T05:33:48,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is []
2018-07-21T05:33:48,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] starting
2018-07-21T05:33:48,523 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,523 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] Got response: 204 No Content
2018-07-21T05:33:48,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] response is []
2018-07-21T05:33:48,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] starting
2018-07-21T05:33:48,524 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,524 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] Got response: 204 No Content
2018-07-21T05:33:48,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] response is []
2018-07-21T05:33:48,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] starting
2018-07-21T05:33:48,524 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,524 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] Got response: 204 No Content
2018-07-21T05:33:48,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] response is []
2018-07-21T05:33:48,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting
2018-07-21T05:33:48,525 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,525 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content
2018-07-21T05:33:48,525 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is []
2018-07-21T05:33:48,525 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting
2018-07-21T05:33:48,526 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,526 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content
2018-07-21T05:33:48,526 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is []
2018-07-21T05:33:48,526 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting
2018-07-21T05:33:48,527 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,527 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content
2018-07-21T05:33:48,527 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is []
2018-07-21T05:33:48,527 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] starting
2018-07-21T05:33:48,528 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,528 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] Got response: 204 No Content
2018-07-21T05:33:48,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] response is []
2018-07-21T05:33:48,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] starting
2018-07-21T05:33:48,528 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,528 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] Got response: 204 No Content
2018-07-21T05:33:48,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] response is []
2018-07-21T05:33:48,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] starting
2018-07-21T05:33:48,529 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,529 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] Got response: 204 No Content
2018-07-21T05:33:48,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483]
response is [] 2018-07-21T05:33:48,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting 2018-07-21T05:33:48,530 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,530 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content 2018-07-21T05:33:48,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is [] 2018-07-21T05:33:48,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting 2018-07-21T05:33:48,530 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,530 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content 2018-07-21T05:33:48,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is [] 2018-07-21T05:33:48,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] starting 2018-07-21T05:33:48,531 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,531 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] Got response: 204 No Content 2018-07-21T05:33:48,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] response is [] 2018-07-21T05:33:48,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] starting 2018-07-21T05:33:48,532 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,532 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] Got response: 204 No Content 2018-07-21T05:33:48,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] response is [] 2018-07-21T05:33:48,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] starting 2018-07-21T05:33:48,532 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,532 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] Got response: 204 No Content 2018-07-21T05:33:48,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] response is [] 2018-07-21T05:33:48,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting 2018-07-21T05:33:48,533 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,533 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content 2018-07-21T05:33:48,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is [] 2018-07-21T05:33:48,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting 2018-07-21T05:33:48,534 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,534 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content 2018-07-21T05:33:48,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is [] 
2018-07-21T05:33:48,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] starting
2018-07-21T05:33:48,535 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,535 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] Got response: 204 No Content
2018-07-21T05:33:48,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] response is []
2018-07-21T05:33:48,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] starting
2018-07-21T05:33:48,535 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,535 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] Got response: 204 No Content
2018-07-21T05:33:48,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] response is []
2018-07-21T05:33:48,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] starting
2018-07-21T05:33:48,536 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,536 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] Got response: 204 No Content
2018-07-21T05:33:48,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] response is []
2018-07-21T05:33:48,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting
2018-07-21T05:33:48,537 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,537 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content
2018-07-21T05:33:48,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is []
2018-07-21T05:33:48,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] starting
2018-07-21T05:33:48,537 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,537 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] Got response: 204 No Content
2018-07-21T05:33:48,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] response is []
2018-07-21T05:33:48,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] starting
2018-07-21T05:33:48,538 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,538 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] Got response: 204 No Content
2018-07-21T05:33:48,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] response is []
2018-07-21T05:33:48,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] starting
2018-07-21T05:33:48,539 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,539 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] Got response: 204 No Content
2018-07-21T05:33:48,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] response is []
2018-07-21T05:33:48,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting
2018-07-21T05:33:48,540 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,540 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content
2018-07-21T05:33:48,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is []
2018-07-21T05:33:48,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] starting
2018-07-21T05:33:48,540 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,540 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] Got response: 204 No Content
2018-07-21T05:33:48,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] response is []
2018-07-21T05:33:48,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] starting
2018-07-21T05:33:48,541 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,541 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] Got response: 204 No Content
2018-07-21T05:33:48,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] response is []
2018-07-21T05:33:48,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] starting
2018-07-21T05:33:48,542 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,542 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] Got response: 204 No Content
2018-07-21T05:33:48,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] response is []
2018-07-21T05:33:48,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting
2018-07-21T05:33:48,542 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,542 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content
2018-07-21T05:33:48,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is []
2018-07-21T05:33:48,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] starting
2018-07-21T05:33:48,543 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,543 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] Got response: 204 No Content
2018-07-21T05:33:48,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] response is []
2018-07-21T05:33:48,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] starting
2018-07-21T05:33:48,543 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,544 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] Got response: 204 No Content
2018-07-21T05:33:48,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] response is []
2018-07-21T05:33:48,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting
2018-07-21T05:33:48,544 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,544 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content
2018-07-21T05:33:48,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is []
2018-07-21T05:33:48,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] starting
2018-07-21T05:33:48,545 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,545 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] Got response: 204 No Content
2018-07-21T05:33:48,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] response is []
2018-07-21T05:33:48,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] starting
2018-07-21T05:33:48,546 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,546 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] Got response: 204 No Content
2018-07-21T05:33:48,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] response is []
2018-07-21T05:33:48,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] starting
2018-07-21T05:33:48,547 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,547 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] Got response: 204 No Content
2018-07-21T05:33:48,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] response is []
2018-07-21T05:33:48,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting
2018-07-21T05:33:48,547 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,547 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content
2018-07-21T05:33:48,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is []
2018-07-21T05:33:48,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting
2018-07-21T05:33:48,548 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,548 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content
2018-07-21T05:33:48,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is []
2018-07-21T05:33:48,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] starting
2018-07-21T05:33:48,548 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,548 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] Got response: 204 No Content
2018-07-21T05:33:48,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] response is []
2018-07-21T05:33:48,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] starting
2018-07-21T05:33:48,549 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,549 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] Got response: 204 No Content
2018-07-21T05:33:48,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] response is []
2018-07-21T05:33:48,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting
2018-07-21T05:33:48,550 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,550 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content
2018-07-21T05:33:48,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is []
2018-07-21T05:33:48,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting
2018-07-21T05:33:48,550 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,550 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content
2018-07-21T05:33:48,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is []
2018-07-21T05:33:48,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] starting
2018-07-21T05:33:48,551 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,551 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] Got response: 204 No Content
2018-07-21T05:33:48,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] response is []
2018-07-21T05:33:48,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] starting
2018-07-21T05:33:48,552 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,552 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] Got response: 204 No Content
2018-07-21T05:33:48,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] response is []
2018-07-21T05:33:48,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] starting
2018-07-21T05:33:48,553 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,553 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] Got response: 204 No Content
2018-07-21T05:33:48,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] response is []
2018-07-21T05:33:48,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting
2018-07-21T05:33:48,553 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,553 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content
2018-07-21T05:33:48,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is []
2018-07-21T05:33:48,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting
2018-07-21T05:33:48,554 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,554 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content
2018-07-21T05:33:48,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is []
2018-07-21T05:33:48,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting
2018-07-21T05:33:48,555 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,555 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content
2018-07-21T05:33:48,555 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is []
2018-07-21T05:33:48,555 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting
2018-07-21T05:33:48,555 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,555 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content
2018-07-21T05:33:48,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is []
2018-07-21T05:33:48,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] starting
2018-07-21T05:33:48,556 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,556 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] Got response: 204 No Content
2018-07-21T05:33:48,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] response is []
2018-07-21T05:33:48,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting
2018-07-21T05:33:48,557 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,557 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content
2018-07-21T05:33:48,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is []
2018-07-21T05:33:48,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting
2018-07-21T05:33:48,557 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,557 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content
2018-07-21T05:33:48,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is []
2018-07-21T05:33:48,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting
2018-07-21T05:33:48,558 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,558 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content
2018-07-21T05:33:48,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is []
2018-07-21T05:33:48,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting
2018-07-21T05:33:48,559 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,559 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content
2018-07-21T05:33:48,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is []
2018-07-21T05:33:48,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting
2018-07-21T05:33:48,559 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,559 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content
2018-07-21T05:33:48,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is []
2018-07-21T05:33:48,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting
2018-07-21T05:33:48,560 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,560 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content
2018-07-21T05:33:48,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is []
2018-07-21T05:33:48,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting
2018-07-21T05:33:48,561 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,561 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content
2018-07-21T05:33:48,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is []
2018-07-21T05:33:48,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting
2018-07-21T05:33:48,562 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,562 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content
2018-07-21T05:33:48,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is []
2018-07-21T05:33:48,562 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting
2018-07-21T05:33:48,562 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,562 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content
2018-07-21T05:33:48,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is []
2018-07-21T05:33:48,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting
2018-07-21T05:33:48,563 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,563 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content
2018-07-21T05:33:48,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is []
2018-07-21T05:33:48,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting
2018-07-21T05:33:48,564 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,564 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content
2018-07-21T05:33:48,564 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is []
2018-07-21T05:33:48,564 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] starting
2018-07-21T05:33:48,565 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,565 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] Got response: 204 No Content
2018-07-21T05:33:48,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] response is []
2018-07-21T05:33:48,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] starting
2018-07-21T05:33:48,566 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,566 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] Got response: 204 No Content
2018-07-21T05:33:48,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] response is []
2018-07-21T05:33:48,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting
2018-07-21T05:33:48,567 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,567 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content
2018-07-21T05:33:48,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is []
2018-07-21T05:33:48,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] starting
2018-07-21T05:33:48,567 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,567 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] Got response: 204 No Content
2018-07-21T05:33:48,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] response is []
2018-07-21T05:33:48,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting
2018-07-21T05:33:48,568 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,568 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content
2018-07-21T05:33:48,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is []
2018-07-21T05:33:48,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting
2018-07-21T05:33:48,569 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,569 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content
2018-07-21T05:33:48,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is []
2018-07-21T05:33:48,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] starting
2018-07-21T05:33:48,569 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,569 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] Got response: 204 No Content
2018-07-21T05:33:48,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] response is []
2018-07-21T05:33:48,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] starting
2018-07-21T05:33:48,570 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,570 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] Got response: 204 No Content
2018-07-21T05:33:48,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] response is []
2018-07-21T05:33:48,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] starting
2018-07-21T05:33:48,571 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,571 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] Got response: 204 No Content
2018-07-21T05:33:48,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] response is []
2018-07-21T05:33:48,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting
2018-07-21T05:33:48,572 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,572 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content
2018-07-21T05:33:48,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is []
2018-07-21T05:33:48,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] starting
2018-07-21T05:33:48,572 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,572 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] Got response: 204 No Content
2018-07-21T05:33:48,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] response is []
2018-07-21T05:33:48,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] starting
2018-07-21T05:33:48,573 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,573 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] Got response: 204 No Content
2018-07-21T05:33:48,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] response is []
2018-07-21T05:33:48,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] starting
2018-07-21T05:33:48,574 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,574 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] Got response: 204 No Content
2018-07-21T05:33:48,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] response is []
2018-07-21T05:33:48,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting
2018-07-21T05:33:48,575 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,575 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content
2018-07-21T05:33:48,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is []
2018-07-21T05:33:48,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting
2018-07-21T05:33:48,575 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,575 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content
2018-07-21T05:33:48,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is []
2018-07-21T05:33:48,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] starting
2018-07-21T05:33:48,576 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,576 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] Got response: 204 No Content
2018-07-21T05:33:48,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] response is []
2018-07-21T05:33:48,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] starting
2018-07-21T05:33:48,577 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,577 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] Got response: 204 No Content
2018-07-21T05:33:48,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] response is []
2018-07-21T05:33:48,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] starting
2018-07-21T05:33:48,577 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,577 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] Got response: 204 No Content
2018-07-21T05:33:48,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] response is []
2018-07-21T05:33:48,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting
2018-07-21T05:33:48,578 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,578 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content
2018-07-21T05:33:48,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is []
2018-07-21T05:33:48,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting
2018-07-21T05:33:48,579 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,579 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content
2018-07-21T05:33:48,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is []
2018-07-21T05:33:48,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] starting
2018-07-21T05:33:48,580 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,580 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] Got response: 204 No Content
2018-07-21T05:33:48,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] response is []
2018-07-21T05:33:48,580 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] starting
2018-07-21T05:33:48,581 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,581 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] Got response: 204 No Content
2018-07-21T05:33:48,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] response is []
2018-07-21T05:33:48,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] starting
2018-07-21T05:33:48,582 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,582 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] Got response: 204 No Content
2018-07-21T05:33:48,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] response is []
2018-07-21T05:33:48,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting
2018-07-21T05:33:48,582 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,582 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content
2018-07-21T05:33:48,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is []
2018-07-21T05:33:48,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] starting
2018-07-21T05:33:48,583 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,583 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] Got response: 204 No Content
2018-07-21T05:33:48,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] response is []
2018-07-21T05:33:48,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] starting
2018-07-21T05:33:48,584 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,584 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] Got response: 204 No Content
2018-07-21T05:33:48,584 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] response is []
2018-07-21T05:33:48,584 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] starting
2018-07-21T05:33:48,585 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,585 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] Got response: 204 No Content
2018-07-21T05:33:48,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] response is []
2018-07-21T05:33:48,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting
2018-07-21T05:33:48,585 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,585 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content
2018-07-21T05:33:48,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is []
2018-07-21T05:33:48,585 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] starting
2018-07-21T05:33:48,586 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,586 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] Got response: 204 No Content
2018-07-21T05:33:48,586 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] response is []
2018-07-21T05:33:48,586 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] starting
2018-07-21T05:33:48,587 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,587 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] Got response: 204 No Content 2018-07-21T05:33:48,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] response is [] 2018-07-21T05:33:48,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] starting 2018-07-21T05:33:48,588 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,588 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] Got response: 204 No Content 2018-07-21T05:33:48,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] response is [] 2018-07-21T05:33:48,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting 2018-07-21T05:33:48,588 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,588 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content 2018-07-21T05:33:48,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is [] 2018-07-21T05:33:48,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] starting 2018-07-21T05:33:48,589 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,589 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] Got response: 204 No Content 2018-07-21T05:33:48,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] response is [] 2018-07-21T05:33:48,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] starting 2018-07-21T05:33:48,590 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,590 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] Got response: 204 No Content 2018-07-21T05:33:48,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] 
response is [] 2018-07-21T05:33:48,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] starting 2018-07-21T05:33:48,590 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,590 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] Got response: 204 No Content 2018-07-21T05:33:48,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] response is [] 2018-07-21T05:33:48,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting 2018-07-21T05:33:48,591 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,591 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content 2018-07-21T05:33:48,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is [] 2018-07-21T05:33:48,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] starting 2018-07-21T05:33:48,592 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,592 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] Got response: 204 No Content 2018-07-21T05:33:48,592 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] response is [] 2018-07-21T05:33:48,592 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] starting 2018-07-21T05:33:48,593 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,593 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] Got response: 204 No Content 2018-07-21T05:33:48,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] response is [] 2018-07-21T05:33:48,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] starting 2018-07-21T05:33:48,593 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,594 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] Got response: 204 No Content 2018-07-21T05:33:48,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] response is [] 2018-07-21T05:33:48,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting 2018-07-21T05:33:48,594 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,594 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content 2018-07-21T05:33:48,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is [] 2018-07-21T05:33:48,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting 2018-07-21T05:33:48,595 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,595 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content 2018-07-21T05:33:48,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is [] 
2018-07-21T05:33:48,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] starting 2018-07-21T05:33:48,596 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,596 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] Got response: 204 No Content 2018-07-21T05:33:48,596 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] response is [] 2018-07-21T05:33:48,596 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] starting 2018-07-21T05:33:48,596 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,597 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] Got response: 204 No Content 2018-07-21T05:33:48,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] response is [] 2018-07-21T05:33:48,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] starting 2018-07-21T05:33:48,597 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,597 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] Got response: 204 No Content 2018-07-21T05:33:48,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] response is [] 2018-07-21T05:33:48,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] starting 2018-07-21T05:33:48,598 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,598 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] Got response: 204 No Content 2018-07-21T05:33:48,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] response is [] 2018-07-21T05:33:48,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] starting 2018-07-21T05:33:48,599 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,599 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] Got response: 204 No Content 2018-07-21T05:33:48,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] response is [] 2018-07-21T05:33:48,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] starting 2018-07-21T05:33:48,600 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,600 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] Got response: 204 No Content 2018-07-21T05:33:48,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] response is [] 2018-07-21T05:33:48,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting 2018-07-21T05:33:48,600 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,600 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content 2018-07-21T05:33:48,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is [] 
2018-07-21T05:33:48,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] starting 2018-07-21T05:33:48,601 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,601 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] Got response: 204 No Content 2018-07-21T05:33:48,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] response is [] 2018-07-21T05:33:48,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting 2018-07-21T05:33:48,602 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,602 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content 2018-07-21T05:33:48,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is [] 2018-07-21T05:33:48,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] starting 2018-07-21T05:33:48,602 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,602 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] Got response: 204 No Content 2018-07-21T05:33:48,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] response is [] 2018-07-21T05:33:48,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting 2018-07-21T05:33:48,603 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,603 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content 2018-07-21T05:33:48,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is [] 2018-07-21T05:33:48,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] starting 2018-07-21T05:33:48,604 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,604 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] Got response: 204 No Content 2018-07-21T05:33:48,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] response is [] 2018-07-21T05:33:48,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting 2018-07-21T05:33:48,605 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,605 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content 2018-07-21T05:33:48,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is [] 2018-07-21T05:33:48,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] starting 2018-07-21T05:33:48,605 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,605 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] Got response: 204 No Content 2018-07-21T05:33:48,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] response is 
[] 2018-07-21T05:33:48,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting 2018-07-21T05:33:48,610 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,610 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content 2018-07-21T05:33:48,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is [] 2018-07-21T05:33:48,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting 2018-07-21T05:33:48,611 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,611 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content 2018-07-21T05:33:48,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is [] 2018-07-21T05:33:48,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting 2018-07-21T05:33:48,612 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,612 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content 2018-07-21T05:33:48,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is [] 2018-07-21T05:33:48,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting 2018-07-21T05:33:48,613 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,613 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content 2018-07-21T05:33:48,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is [] 2018-07-21T05:33:48,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting 2018-07-21T05:33:48,614 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,614 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content 2018-07-21T05:33:48,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is [] 2018-07-21T05:33:48,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting 2018-07-21T05:33:48,615 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,615 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content 2018-07-21T05:33:48,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is [] 2018-07-21T05:33:48,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting 2018-07-21T05:33:48,616 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,616 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content 2018-07-21T05:33:48,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is [] 
2018-07-21T05:33:48,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] starting 2018-07-21T05:33:48,616 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,616 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] Got response: 204 No Content 2018-07-21T05:33:48,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] response is [] 2018-07-21T05:33:48,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] starting 2018-07-21T05:33:48,617 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,617 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] Got response: 204 No Content 2018-07-21T05:33:48,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] response is [] 2018-07-21T05:33:48,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] starting 2018-07-21T05:33:48,618 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,618 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] Got response: 204 No Content 2018-07-21T05:33:48,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] response is [] 2018-07-21T05:33:48,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] starting 2018-07-21T05:33:48,619 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,619 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] Got response: 204 No Content 2018-07-21T05:33:48,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] response is [] 2018-07-21T05:33:48,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] starting 2018-07-21T05:33:48,620 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,620 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] Got response: 204 No Content 2018-07-21T05:33:48,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] response is [] 2018-07-21T05:33:48,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] starting 2018-07-21T05:33:48,621 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,621 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] Got response: 204 No Content 2018-07-21T05:33:48,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] response is [] 2018-07-21T05:33:48,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] starting 2018-07-21T05:33:48,622 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,622 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] Got response: 204 No Content 2018-07-21T05:33:48,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] response 
is [] 2018-07-21T05:33:48,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] starting 2018-07-21T05:33:48,624 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,624 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] Got response: 204 No Content 2018-07-21T05:33:48,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] response is [] 2018-07-21T05:33:48,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] starting 2018-07-21T05:33:48,624 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,624 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] Got response: 204 No Content 2018-07-21T05:33:48,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] response is [] 2018-07-21T05:33:48,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] starting 2018-07-21T05:33:48,625 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,625 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] Got response: 204 No Content 2018-07-21T05:33:48,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] response is [] 2018-07-21T05:33:48,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] starting 2018-07-21T05:33:48,626 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,626 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] Got response: 204 No Content 2018-07-21T05:33:48,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] response is [] 2018-07-21T05:33:48,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting 2018-07-21T05:33:48,626 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,626 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content 2018-07-21T05:33:48,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is [] 2018-07-21T05:33:48,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting 2018-07-21T05:33:48,627 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,627 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content 2018-07-21T05:33:48,627 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is [] 2018-07-21T05:33:48,627 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] starting 2018-07-21T05:33:48,627 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,627 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] Got response: 204 No Content 2018-07-21T05:33:48,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] response is [] 
2018-07-21T05:33:48,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] starting 2018-07-21T05:33:48,628 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,628 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] Got response: 204 No Content 2018-07-21T05:33:48,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] response is [] 2018-07-21T05:33:48,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting 2018-07-21T05:33:48,629 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,629 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content 2018-07-21T05:33:48,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is [] 2018-07-21T05:33:48,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:33:48,630 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,630 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:33:48,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 2018-07-21T05:33:48,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] starting 2018-07-21T05:33:48,630 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,630 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] Got response: 204 No Content 2018-07-21T05:33:48,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] response is [] 2018-07-21T05:33:48,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] starting 2018-07-21T05:33:48,631 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,631 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] Got response: 204 No Content 2018-07-21T05:33:48,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] response is [] 2018-07-21T05:33:48,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting 2018-07-21T05:33:48,632 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,632 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content 2018-07-21T05:33:48,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is [] 2018-07-21T05:33:48,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:33:48,633 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,633 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:33:48,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 
2018-07-21T05:33:48,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] starting 2018-07-21T05:33:48,634 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,634 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] Got response: 204 No Content 2018-07-21T05:33:48,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] response is [] 2018-07-21T05:33:48,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] starting 2018-07-21T05:33:48,635 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,635 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] Got response: 204 No Content 2018-07-21T05:33:48,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] response is [] 2018-07-21T05:33:48,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting 2018-07-21T05:33:48,635 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,635 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content 2018-07-21T05:33:48,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is [] 2018-07-21T05:33:48,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:33:48,636 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,636 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:33:48,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 2018-07-21T05:33:48,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] starting 2018-07-21T05:33:48,637 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,637 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] Got response: 204 No Content 2018-07-21T05:33:48,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] response is [] 2018-07-21T05:33:48,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] starting 2018-07-21T05:33:48,637 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,637 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] Got response: 204 No Content 2018-07-21T05:33:48,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] response is [] 2018-07-21T05:33:48,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting 2018-07-21T05:33:48,638 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,638 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content 2018-07-21T05:33:48,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response 
is [] 2018-07-21T05:33:48,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting 2018-07-21T05:33:48,639 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,639 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content 2018-07-21T05:33:48,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is [] 2018-07-21T05:33:48,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] starting 2018-07-21T05:33:48,640 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,640 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] Got response: 204 No Content 2018-07-21T05:33:48,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] response is [] 2018-07-21T05:33:48,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting 2018-07-21T05:33:48,640 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,640 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content 2018-07-21T05:33:48,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is [] 2018-07-21T05:33:48,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting 2018-07-21T05:33:48,641 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,641 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content 2018-07-21T05:33:48,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is [] 2018-07-21T05:33:48,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting 2018-07-21T05:33:48,642 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,642 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content 2018-07-21T05:33:48,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is [] 2018-07-21T05:33:48,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:33:48,642 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,642 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:33:48,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 2018-07-21T05:33:48,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting 2018-07-21T05:33:48,643 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,643 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content 2018-07-21T05:33:48,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is [] 
2018-07-21T05:33:48,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting 2018-07-21T05:33:48,644 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,644 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content 2018-07-21T05:33:48,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is [] 2018-07-21T05:33:48,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:33:48,644 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,644 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:33:48,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 2018-07-21T05:33:48,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] starting 2018-07-21T05:33:48,645 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,645 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] Got response: 204 No Content 2018-07-21T05:33:48,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] response is [] 2018-07-21T05:33:48,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] starting 2018-07-21T05:33:48,646 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,646 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] Got response: 204 No Content 2018-07-21T05:33:48,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] response is [] 2018-07-21T05:33:48,646 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] starting 2018-07-21T05:33:48,647 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,647 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
2018-07-21T05:33:48,647 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,647 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] Got response: 204 No Content
2018-07-21T05:33:48,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] response is []
[... from 2018-07-21T05:33:48,647 through 2018-07-21T05:33:48,680 the same four-record cycle (NettyHttpClient GET starting -> messageReceived: HTTP/1.1 204 No Content -> Got response: 204 No Content -> DruidStorageHandler Checking segment [...] response is []) repeats against the same coordinator endpoint for segments default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_{150-154, 160, 162-169, 270-273, 275, 281, 283-289, 390-394, 396} and default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_{44-56}; every request is answered 204 No Content with an empty body ...]
2018-07-21T05:33:48,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] starting
2018-07-21T05:33:48,681 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,681 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] Got response: 204 No Content 2018-07-21T05:33:48,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] response is [] 2018-07-21T05:33:48,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] starting 2018-07-21T05:33:48,682 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,682 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] Got response: 204 No Content 2018-07-21T05:33:48,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] response is [] 2018-07-21T05:33:48,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:33:48,682 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,682 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:33:48,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:33:48,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] starting 2018-07-21T05:33:48,683 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,683 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] Got response: 204 No Content 2018-07-21T05:33:48,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] response is [] 2018-07-21T05:33:48,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] starting 2018-07-21T05:33:48,684 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,684 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] Got response: 204 No Content 2018-07-21T05:33:48,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] response 
is [] 2018-07-21T05:33:48,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] starting 2018-07-21T05:33:48,684 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,684 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] Got response: 204 No Content 2018-07-21T05:33:48,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] response is [] 2018-07-21T05:33:48,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:33:48,685 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,685 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:33:48,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 2018-07-21T05:33:48,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] starting 2018-07-21T05:33:48,686 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,686 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] Got response: 204 No Content 2018-07-21T05:33:48,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] response is [] 2018-07-21T05:33:48,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] starting 2018-07-21T05:33:48,686 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,686 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] Got response: 204 No Content 2018-07-21T05:33:48,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] response is [] 2018-07-21T05:33:48,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] starting 2018-07-21T05:33:48,687 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,687 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] Got response: 204 No Content 2018-07-21T05:33:48,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] response is [] 2018-07-21T05:33:48,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] starting 2018-07-21T05:33:48,688 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,688 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] Got response: 204 No Content 2018-07-21T05:33:48,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] response is [] 2018-07-21T05:33:48,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] starting 2018-07-21T05:33:48,688 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,688 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] Got response: 204 No Content 2018-07-21T05:33:48,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] response 
is [] 2018-07-21T05:33:48,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:33:48,689 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,689 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:33:48,690 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 2018-07-21T05:33:48,690 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] starting 2018-07-21T05:33:48,690 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,690 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] Got response: 204 No Content 2018-07-21T05:33:48,690 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] response is [] 2018-07-21T05:33:48,690 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] starting 2018-07-21T05:33:48,691 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,691 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] Got response: 204 No Content 2018-07-21T05:33:48,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] response is [] 2018-07-21T05:33:48,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] starting 2018-07-21T05:33:48,692 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,692 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] Got response: 204 No Content 2018-07-21T05:33:48,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] response is [] 2018-07-21T05:33:48,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] starting 2018-07-21T05:33:48,693 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,693 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] Got response: 204 No Content 2018-07-21T05:33:48,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] response is [] 2018-07-21T05:33:48,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] starting 2018-07-21T05:33:48,693 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,693 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] Got response: 204 No Content 2018-07-21T05:33:48,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] response is [] 2018-07-21T05:33:48,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] starting 2018-07-21T05:33:48,694 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,694 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] Got response: 204 No Content 2018-07-21T05:33:48,694 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] response 
is [] 2018-07-21T05:33:48,694 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] starting 2018-07-21T05:33:48,695 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,695 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] Got response: 204 No Content 2018-07-21T05:33:48,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] response is [] 2018-07-21T05:33:48,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] starting 2018-07-21T05:33:48,695 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,695 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] Got response: 204 No Content 2018-07-21T05:33:48,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] response is [] 2018-07-21T05:33:48,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] starting 2018-07-21T05:33:48,696 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,696 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] Got response: 204 No Content 2018-07-21T05:33:48,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] response is [] 2018-07-21T05:33:48,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] starting 2018-07-21T05:33:48,697 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,697 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] Got response: 204 No Content 2018-07-21T05:33:48,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] response is [] 2018-07-21T05:33:48,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] starting 2018-07-21T05:33:48,697 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,697 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] Got response: 204 No Content 2018-07-21T05:33:48,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] response is [] 2018-07-21T05:33:48,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] starting 2018-07-21T05:33:48,698 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,698 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] Got response: 204 No Content 2018-07-21T05:33:48,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] response is [] 2018-07-21T05:33:48,698 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] starting 2018-07-21T05:33:48,699 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,699 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] Got response: 204 No Content 2018-07-21T05:33:48,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] 
response is [] 2018-07-21T05:33:48,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting 2018-07-21T05:33:48,700 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,700 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content 2018-07-21T05:33:48,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is [] 2018-07-21T05:33:48,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] starting 2018-07-21T05:33:48,701 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,701 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] Got response: 204 No Content 2018-07-21T05:33:48,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] response is [] 2018-07-21T05:33:48,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] starting 2018-07-21T05:33:48,702 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,702 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] Got response: 204 No Content 2018-07-21T05:33:48,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] response is [] 2018-07-21T05:33:48,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] starting 2018-07-21T05:33:48,702 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,702 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] Got response: 204 No Content 2018-07-21T05:33:48,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] response is [] 2018-07-21T05:33:48,702 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] starting 2018-07-21T05:33:48,703 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,703 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] Got response: 204 No Content
2018-07-21T05:33:48,703 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] response is []
2018-07-21T05:33:48,703 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] starting
2018-07-21T05:33:48,704 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,704 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] Got response: 204 No Content
2018-07-21T05:33:48,704 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] response is []
[... 2018-07-21T05:33:48,704 through 05:33:48,741: the same four-record cycle ("starting" -> "messageReceived: ... HTTP/1.1 204 No Content" -> "Got response: 204 No Content" -> "Checking segment [...] response is []") repeats, spread across HttpClient-Netty-Worker-0 through Worker-15, for the remaining default.druid_max_size_partition segments of version 2018-07-21T05:31:59.547-07:00:
  interval 1970-01-01T00:00:00.000Z/1970-01-01T01:00:00.000Z, shards 423, 404, 403, 406, 405, 408, 407, 409, 411, 410, 413, 412, 7, 8, 9, 400, 402, 401, 217, 338, 459, 216, 337, 458, 219, 218, 339;
  interval 1969-12-31T23:00:00.000Z/1970-01-01T00:00:00.000Z, shards 141, 142, 140, 10, 19, 12, 11, 14, 13, 16, 15, 18, 17, 130, 131, 134, 135, 132, 133, 138, 139 (the two interval groups interleave in the original request order).
Every request returned HTTP 204 No Content with an empty body. ...]
2018-07-21T05:33:48,741 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content 2018-07-21T05:33:48,741 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is [] 2018-07-21T05:33:48,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting 2018-07-21T05:33:48,742 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,742 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content 2018-07-21T05:33:48,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is [] 2018-07-21T05:33:48,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting 2018-07-21T05:33:48,743 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,743 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content 2018-07-21T05:33:48,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] 
response is [] 2018-07-21T05:33:48,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] starting 2018-07-21T05:33:48,743 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,743 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] Got response: 204 No Content 2018-07-21T05:33:48,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] response is [] 2018-07-21T05:33:48,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] starting 2018-07-21T05:33:48,744 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,744 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] Got response: 204 No Content 2018-07-21T05:33:48,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] response is [] 2018-07-21T05:33:48,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] starting 2018-07-21T05:33:48,745 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,745 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] Got response: 204 No Content 2018-07-21T05:33:48,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] response is [] 2018-07-21T05:33:48,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] starting 2018-07-21T05:33:48,745 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,745 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] Got response: 204 No Content 2018-07-21T05:33:48,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] response is [] 2018-07-21T05:33:48,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] starting 2018-07-21T05:33:48,746 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,746 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] Got response: 204 No Content 2018-07-21T05:33:48,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] response is [] 2018-07-21T05:33:48,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] starting 2018-07-21T05:33:48,747 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,747 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] Got response: 204 No Content 2018-07-21T05:33:48,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] response is [] 2018-07-21T05:33:48,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:33:48,747 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,747 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:33:48,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response 
is [] 2018-07-21T05:33:48,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] starting 2018-07-21T05:33:48,748 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,748 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] Got response: 204 No Content 2018-07-21T05:33:48,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] response is [] 2018-07-21T05:33:48,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] starting 2018-07-21T05:33:48,749 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,749 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] Got response: 204 No Content 2018-07-21T05:33:48,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] response is [] 2018-07-21T05:33:48,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] starting 2018-07-21T05:33:48,750 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,750 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] Got response: 204 No Content 2018-07-21T05:33:48,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] response is [] 2018-07-21T05:33:48,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:33:48,750 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,750 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:33:48,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:33:48,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] starting 2018-07-21T05:33:48,751 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,751 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] Got response: 204 No Content 2018-07-21T05:33:48,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] response is [] 2018-07-21T05:33:48,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] starting 2018-07-21T05:33:48,751 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,751 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] Got response: 204 No Content 2018-07-21T05:33:48,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] response is [] 2018-07-21T05:33:48,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] starting 2018-07-21T05:33:48,752 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,752 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] Got response: 204 No Content 2018-07-21T05:33:48,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] response 
is [] 2018-07-21T05:33:48,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:33:48,753 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,753 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:33:48,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:33:48,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] starting 2018-07-21T05:33:48,753 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,753 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] Got response: 204 No Content 2018-07-21T05:33:48,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] response is [] 2018-07-21T05:33:48,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] starting 2018-07-21T05:33:48,754 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,754 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] Got response: 204 No Content 2018-07-21T05:33:48,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] response is [] 2018-07-21T05:33:48,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] starting 2018-07-21T05:33:48,755 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,755 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] Got response: 204 No Content 2018-07-21T05:33:48,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] response is [] 2018-07-21T05:33:48,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:33:48,756 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,756 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:33:48,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:33:48,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] starting 2018-07-21T05:33:48,757 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,757 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] Got response: 204 No Content 2018-07-21T05:33:48,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] response is [] 2018-07-21T05:33:48,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] starting 2018-07-21T05:33:48,757 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,757 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] Got response: 204 No Content 2018-07-21T05:33:48,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] 
response is [] 2018-07-21T05:33:48,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] starting 2018-07-21T05:33:48,758 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,758 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] Got response: 204 No Content 2018-07-21T05:33:48,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] response is [] 2018-07-21T05:33:48,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:33:48,759 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,759 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:33:48,759 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is [] 2018-07-21T05:33:48,759 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] starting 2018-07-21T05:33:48,760 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,760 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] Got response: 204 No Content 2018-07-21T05:33:48,760 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] response is [] 2018-07-21T05:33:48,760 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] starting 2018-07-21T05:33:48,761 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,761 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] Got response: 204 No Content 2018-07-21T05:33:48,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] response is [] 2018-07-21T05:33:48,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] starting 2018-07-21T05:33:48,761 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,761 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] Got response: 204 No Content 2018-07-21T05:33:48,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] response is [] 2018-07-21T05:33:48,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:33:48,762 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,762 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:33:48,762 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:33:48,762 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] starting 2018-07-21T05:33:48,763 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,763 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] Got response: 204 No Content 2018-07-21T05:33:48,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] response 
is []
2018-07-21T05:33:48,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] starting
2018-07-21T05:33:48,763 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,763 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] Got response: 204 No Content
2018-07-21T05:33:48,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] response is []
2018-07-21T05:33:48,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] starting
2018-07-21T05:33:48,764 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,764 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] Got response: 204 No Content
2018-07-21T05:33:48,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] response is []
2018-07-21T05:33:48,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] starting
2018-07-21T05:33:48,765 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,765 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] Got response: 204 No Content
2018-07-21T05:33:48,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] response is []
2018-07-21T05:33:48,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] starting
2018-07-21T05:33:48,766 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,766 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] Got response: 204 No Content
2018-07-21T05:33:48,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] response is []
2018-07-21T05:33:48,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] starting
2018-07-21T05:33:48,766 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,766 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] Got response: 204 No Content
2018-07-21T05:33:48,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] response is []
2018-07-21T05:33:48,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting
2018-07-21T05:33:48,767 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,767 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content
2018-07-21T05:33:48,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is []
2018-07-21T05:33:48,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] starting
2018-07-21T05:33:48,768 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,768 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] Got response: 204 No Content
2018-07-21T05:33:48,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] response is []
2018-07-21T05:33:48,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] starting
2018-07-21T05:33:48,769 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,769 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] Got response: 204 No Content
2018-07-21T05:33:48,769 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] response is []
2018-07-21T05:33:48,769 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] starting
2018-07-21T05:33:48,769 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,769 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] Got response: 204 No Content
2018-07-21T05:33:48,769 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] response is []
2018-07-21T05:33:48,769 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] starting
2018-07-21T05:33:48,770 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,770 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] Got response: 204 No Content
2018-07-21T05:33:48,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] response is []
2018-07-21T05:33:48,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] starting
2018-07-21T05:33:48,771 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,771 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] Got response: 204 No Content
2018-07-21T05:33:48,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] response is []
2018-07-21T05:33:48,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] starting
2018-07-21T05:33:48,771 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,771 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] Got response: 204 No Content
2018-07-21T05:33:48,771 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] response is []
2018-07-21T05:33:48,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] starting
2018-07-21T05:33:48,772 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,772 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] Got response: 204 No Content
2018-07-21T05:33:48,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] response is []
2018-07-21T05:33:48,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] starting
2018-07-21T05:33:48,773 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,773 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] Got response: 204 No Content
2018-07-21T05:33:48,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] response is []
2018-07-21T05:33:48,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting
2018-07-21T05:33:48,774 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,774 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content
2018-07-21T05:33:48,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is []
2018-07-21T05:33:48,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting
2018-07-21T05:33:48,775 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,775 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content
2018-07-21T05:33:48,775 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is []
2018-07-21T05:33:48,775 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] starting
2018-07-21T05:33:48,776 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,776 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] Got response: 204 No Content
2018-07-21T05:33:48,776 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] response is []
2018-07-21T05:33:48,776 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting
2018-07-21T05:33:48,777 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,777 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content
2018-07-21T05:33:48,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is []
2018-07-21T05:33:48,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting
2018-07-21T05:33:48,778 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,778 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content
2018-07-21T05:33:48,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is []
2018-07-21T05:33:48,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting
2018-07-21T05:33:48,779 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,779 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content
2018-07-21T05:33:48,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is []
2018-07-21T05:33:48,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting
2018-07-21T05:33:48,779 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,779 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content
2018-07-21T05:33:48,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is []
2018-07-21T05:33:48,779 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting
2018-07-21T05:33:48,780 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,780 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content
2018-07-21T05:33:48,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is []
2018-07-21T05:33:48,780 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting
2018-07-21T05:33:48,781 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,781 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content
2018-07-21T05:33:48,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is []
2018-07-21T05:33:48,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting
2018-07-21T05:33:48,781 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,781 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content
2018-07-21T05:33:48,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is []
2018-07-21T05:33:48,781 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] starting
2018-07-21T05:33:48,782 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,782 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] Got response: 204 No Content
2018-07-21T05:33:48,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] response is []
2018-07-21T05:33:48,782 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] starting
2018-07-21T05:33:48,783 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,783 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] Got response: 204 No Content
2018-07-21T05:33:48,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] response is []
2018-07-21T05:33:48,783 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] starting
2018-07-21T05:33:48,784 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,784 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] Got response: 204 No Content
2018-07-21T05:33:48,784 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] response is []
2018-07-21T05:33:48,784 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] starting
2018-07-21T05:33:48,785 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,785 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] Got response: 204 No Content
2018-07-21T05:33:48,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] response is []
2018-07-21T05:33:48,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] starting
2018-07-21T05:33:48,785 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,785 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] Got response: 204 No Content
2018-07-21T05:33:48,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] response is []
2018-07-21T05:33:48,785 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] starting
2018-07-21T05:33:48,786 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,786 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] Got response: 204 No Content
2018-07-21T05:33:48,786 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] response is []
2018-07-21T05:33:48,786 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] starting
2018-07-21T05:33:48,787 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,787 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] Got response: 204 No Content
2018-07-21T05:33:48,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] response is []
2018-07-21T05:33:48,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] starting
2018-07-21T05:33:48,787 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,787 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] Got response: 204 No Content
2018-07-21T05:33:48,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] response is []
2018-07-21T05:33:48,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] starting
2018-07-21T05:33:48,788 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,788 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] Got response: 204 No Content
2018-07-21T05:33:48,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] response is []
2018-07-21T05:33:48,788 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] starting
2018-07-21T05:33:48,789 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,789 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] Got response: 204 No Content
2018-07-21T05:33:48,789 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] response is []
2018-07-21T05:33:48,789 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] starting
2018-07-21T05:33:48,789 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,789 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] Got response: 204 No Content
2018-07-21T05:33:48,789 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] response is []
2018-07-21T05:33:48,789 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] starting
2018-07-21T05:33:48,790 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,790 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] Got response: 204 No Content
2018-07-21T05:33:48,790 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] response is []
2018-07-21T05:33:48,791 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] starting
2018-07-21T05:33:48,791 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,791 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] Got response: 204 No Content
2018-07-21T05:33:48,791 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] response is []
2018-07-21T05:33:48,791 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] starting
2018-07-21T05:33:48,792 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,792 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] Got response: 204 No Content
2018-07-21T05:33:48,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] response is []
2018-07-21T05:33:48,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] starting
2018-07-21T05:33:48,793 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,793 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] Got response: 204 No Content
2018-07-21T05:33:48,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] response is []
2018-07-21T05:33:48,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] starting
2018-07-21T05:33:48,794 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,794 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] Got response: 204 No Content
2018-07-21T05:33:48,794 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] response is []
2018-07-21T05:33:48,794 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] starting
2018-07-21T05:33:48,795 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,795 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] Got response: 204 No Content
2018-07-21T05:33:48,795 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] response is []
2018-07-21T05:33:48,795 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] starting
2018-07-21T05:33:48,795 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,795 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] Got response: 204 No Content
2018-07-21T05:33:48,795 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] response is []
2018-07-21T05:33:48,796 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] starting
2018-07-21T05:33:48,796 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,796 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] Got response: 204 No Content
2018-07-21T05:33:48,796 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] response is []
2018-07-21T05:33:48,796 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] starting
2018-07-21T05:33:48,797 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,797 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] Got response: 204 No Content
2018-07-21T05:33:48,797 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] response is []
2018-07-21T05:33:48,797 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] starting
2018-07-21T05:33:48,798 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,798 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] Got response: 204 No Content
2018-07-21T05:33:48,798 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] response is []
2018-07-21T05:33:48,798 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] starting
2018-07-21T05:33:48,798 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,798 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] Got response: 204 No Content
2018-07-21T05:33:48,798 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] response is []
2018-07-21T05:33:48,798 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] starting
2018-07-21T05:33:48,799 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,799 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] Got response: 204 No Content
2018-07-21T05:33:48,799 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] response is []
2018-07-21T05:33:48,799 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] starting
2018-07-21T05:33:48,800 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,800 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] Got response: 204 No Content
2018-07-21T05:33:48,800 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] response is []
2018-07-21T05:33:48,800 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] starting
2018-07-21T05:33:48,801 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,801 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] Got response: 204 No Content
2018-07-21T05:33:48,801 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] response
is [] 2018-07-21T05:33:48,801 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] starting 2018-07-21T05:33:48,802 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,802 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] Got response: 204 No Content 2018-07-21T05:33:48,802 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] response is [] 2018-07-21T05:33:48,802 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] starting 2018-07-21T05:33:48,802 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,802 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] Got response: 204 No Content 2018-07-21T05:33:48,802 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] response is [] 2018-07-21T05:33:48,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] starting 2018-07-21T05:33:48,803 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,803 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] Got response: 204 No Content 2018-07-21T05:33:48,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] response is [] 2018-07-21T05:33:48,803 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] starting 2018-07-21T05:33:48,804 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,804 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] Got response: 204 No Content 2018-07-21T05:33:48,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] response is [] 2018-07-21T05:33:48,804 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting 2018-07-21T05:33:48,805 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,805 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content 2018-07-21T05:33:48,805 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is [] 2018-07-21T05:33:48,805 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting 2018-07-21T05:33:48,806 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,806 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content 2018-07-21T05:33:48,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is [] 2018-07-21T05:33:48,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] starting 2018-07-21T05:33:48,807 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,807 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] Got response: 204 No Content 2018-07-21T05:33:48,807 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] 
response is [] 2018-07-21T05:33:48,807 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting 2018-07-21T05:33:48,807 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,807 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content 2018-07-21T05:33:48,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is [] 2018-07-21T05:33:48,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting 2018-07-21T05:33:48,808 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,808 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content 2018-07-21T05:33:48,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is [] 2018-07-21T05:33:48,808 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting 2018-07-21T05:33:48,809 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,809 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content 2018-07-21T05:33:48,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is [] 2018-07-21T05:33:48,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:33:48,810 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,810 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:33:48,810 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 2018-07-21T05:33:48,810 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting 2018-07-21T05:33:48,810 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,810 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content 2018-07-21T05:33:48,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is [] 2018-07-21T05:33:48,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting 2018-07-21T05:33:48,811 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,811 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content 2018-07-21T05:33:48,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is [] 2018-07-21T05:33:48,811 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:33:48,812 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,812 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:33:48,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response 
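The four-record cycle that repeats throughout this stretch of the log -- "starting", a 204 No Content reply, "Got response", then "Checking segment ... response is []" -- is DruidStorageHandler polling the Druid coordinator's segment-metadata endpoint, GET /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}, once per pushed segment shard; an empty body means the coordinator does not yet know the segment, so the handler keeps probing. A minimal, self-contained sketch of that kind of poll loop follows. It is not Hive's actual implementation: it assumes Java 11's java.net.http client, and the class name SegmentLoadPoller, the fixed one-second back-off, and the hard-coded coordinator address and segment id (copied from the URLs above) are illustrative placeholders.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SegmentLoadPoller {
        public static void main(String[] args) throws Exception {
            // Coordinator address, datasource, and segment id copied from the
            // log above purely for illustration.
            String coordinator = "http://localhost:8081";
            String dataSource = "default.druid_max_size_partition";
            String segmentId = "default.druid_max_size_partition_"
                + "1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_"
                + "2018-07-21T05:31:59.547-07:00_215";

            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder(URI.create(
                coordinator + "/druid/coordinator/v1/datasources/" + dataSource
                    + "/segments/" + segmentId)).GET().build();

            // As seen in the log: 204 with an empty body means the coordinator
            // has no metadata for the segment yet, so poll again. A 200 with a
            // JSON body is assumed here to mean the segment is known.
            while (true) {
                HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());
                if (response.statusCode() == 200 && !response.body().isEmpty()) {
                    System.out.println("segment visible: " + response.body());
                    break;
                }
                System.out.println("segment not loaded yet (HTTP "
                    + response.statusCode() + ")");
                Thread.sleep(1000L); // fixed back-off between probes (illustrative)
            }
        }
    }

In the run recorded here every probe still comes back with an empty body, which is why the same cycle repeats below for each remaining shard id until the coordinator publishes the segments.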
2018-07-21T05:33:48,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting
2018-07-21T05:33:48,812 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,812 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content
2018-07-21T05:33:48,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is []
2018-07-21T05:33:48,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] starting
2018-07-21T05:33:48,813 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,813 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] Got response: 204 No Content
2018-07-21T05:33:48,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] response is []
2018-07-21T05:33:48,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] starting
2018-07-21T05:33:48,814 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,814 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] Got response: 204 No Content
2018-07-21T05:33:48,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] response is []
2018-07-21T05:33:48,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] starting
2018-07-21T05:33:48,814 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,814 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] Got response: 204 No Content
2018-07-21T05:33:48,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] response is []
2018-07-21T05:33:48,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] starting
2018-07-21T05:33:48,815 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,815 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] Got response: 204 No Content
2018-07-21T05:33:48,815 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] response is []
2018-07-21T05:33:48,815 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] starting
2018-07-21T05:33:48,816 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,816 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] Got response: 204 No Content
2018-07-21T05:33:48,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] response is []
2018-07-21T05:33:48,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] starting
2018-07-21T05:33:48,816 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,816 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] Got response: 204 No Content
2018-07-21T05:33:48,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] response is []
2018-07-21T05:33:48,816 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] starting
2018-07-21T05:33:48,817 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,817 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] Got response: 204 No Content
2018-07-21T05:33:48,817 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] response is []
2018-07-21T05:33:48,817 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] starting
2018-07-21T05:33:48,818 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,818 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] Got response: 204 No Content
2018-07-21T05:33:48,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] response is []
2018-07-21T05:33:48,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] starting
2018-07-21T05:33:48,818 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,818 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] Got response: 204 No Content
2018-07-21T05:33:48,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] response is []
2018-07-21T05:33:48,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] starting
2018-07-21T05:33:48,819 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,819 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] Got response: 204 No Content
2018-07-21T05:33:48,819 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] response is []
2018-07-21T05:33:48,819 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] starting
2018-07-21T05:33:48,819 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,819 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] Got response: 204 No Content
2018-07-21T05:33:48,819 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] response is []
2018-07-21T05:33:48,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] starting
2018-07-21T05:33:48,820 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,820 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] Got response: 204 No Content
2018-07-21T05:33:48,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] response is []
2018-07-21T05:33:48,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] starting
2018-07-21T05:33:48,821 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,821 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] Got response: 204 No Content
2018-07-21T05:33:48,821 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] response is []
2018-07-21T05:33:48,821 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] starting
2018-07-21T05:33:48,821 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,821 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] Got response: 204 No Content
2018-07-21T05:33:48,821 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] response is []
2018-07-21T05:33:48,822 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] starting
2018-07-21T05:33:48,822 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,822 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] Got response: 204 No Content
2018-07-21T05:33:48,822 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] response is []
2018-07-21T05:33:48,822 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] starting
2018-07-21T05:33:48,823 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,823 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] Got response: 204 No Content
2018-07-21T05:33:48,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] response is []
2018-07-21T05:33:48,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] starting
2018-07-21T05:33:48,823 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,823 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] Got response: 204 No Content
2018-07-21T05:33:48,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] response is []
2018-07-21T05:33:48,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] starting
2018-07-21T05:33:48,824 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,824 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] Got response: 204 No Content
2018-07-21T05:33:48,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] response is []
2018-07-21T05:33:48,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] starting
2018-07-21T05:33:48,825 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,825 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] Got response: 204 No Content
2018-07-21T05:33:48,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] response is []
2018-07-21T05:33:48,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] starting
2018-07-21T05:33:48,825 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,825 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] Got response: 204 No Content
2018-07-21T05:33:48,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] response is []
2018-07-21T05:33:48,825 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] starting
2018-07-21T05:33:48,826 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,826 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] Got response: 204 No Content
2018-07-21T05:33:48,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] response is []
2018-07-21T05:33:48,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] starting
2018-07-21T05:33:48,826 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,826 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] Got response: 204 No Content
2018-07-21T05:33:48,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] response is []
2018-07-21T05:33:48,826 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] starting
2018-07-21T05:33:48,827 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,827 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] Got response: 204 No Content
2018-07-21T05:33:48,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] response is []
2018-07-21T05:33:48,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] starting
2018-07-21T05:33:48,827 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,827 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] Got response: 204 No Content
2018-07-21T05:33:48,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] response is []
2018-07-21T05:33:48,827 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] starting
2018-07-21T05:33:48,828 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,828 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] Got response: 204 No Content 2018-07-21T05:33:48,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] response is [] 2018-07-21T05:33:48,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] starting 2018-07-21T05:33:48,828 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,828 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] Got response: 204 No Content 2018-07-21T05:33:48,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] response is [] 2018-07-21T05:33:48,828 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:33:48,829 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,829 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:33:48,829 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response 
is [] 2018-07-21T05:33:48,829 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] starting 2018-07-21T05:33:48,829 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,829 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] Got response: 204 No Content 2018-07-21T05:33:48,829 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] response is [] 2018-07-21T05:33:48,829 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:33:48,830 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,830 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:33:48,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:33:48,830 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] starting 2018-07-21T05:33:48,831 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,831 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] Got response: 204 No Content 2018-07-21T05:33:48,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] response is [] 2018-07-21T05:33:48,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] starting 2018-07-21T05:33:48,831 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,831 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] Got response: 204 No Content 2018-07-21T05:33:48,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] response is [] 2018-07-21T05:33:48,831 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:33:48,832 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,832 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:33:48,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:33:48,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:33:48,832 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,832 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:33:48,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is [] 2018-07-21T05:33:48,832 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting 2018-07-21T05:33:48,833 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,833 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content 2018-07-21T05:33:48,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response 
is [] 2018-07-21T05:33:48,833 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:33:48,833 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,833 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:33:48,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:33:48,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:33:48,834 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,834 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:33:48,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:33:48,834 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting 2018-07-21T05:33:48,835 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,835 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content 2018-07-21T05:33:48,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is [] 2018-07-21T05:33:48,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting 2018-07-21T05:33:48,835 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,835 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content 2018-07-21T05:33:48,835 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is [] 2018-07-21T05:33:48,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting 2018-07-21T05:33:48,836 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,836 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content 2018-07-21T05:33:48,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is [] 2018-07-21T05:33:48,836 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] starting 2018-07-21T05:33:48,837 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,837 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] Got response: 204 No Content 2018-07-21T05:33:48,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] response is [] 2018-07-21T05:33:48,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] starting 2018-07-21T05:33:48,837 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,837 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] Got response: 204 No Content 2018-07-21T05:33:48,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] response 
is [] 2018-07-21T05:33:48,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] starting 2018-07-21T05:33:48,838 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,838 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] Got response: 204 No Content 2018-07-21T05:33:48,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] response is [] 2018-07-21T05:33:48,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] starting 2018-07-21T05:33:48,838 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,838 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] Got response: 204 No Content 2018-07-21T05:33:48,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] response is [] 2018-07-21T05:33:48,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] starting 2018-07-21T05:33:48,839 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,839 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] Got response: 204 No Content 2018-07-21T05:33:48,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] response is [] 2018-07-21T05:33:48,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] starting 2018-07-21T05:33:48,840 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,840 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] Got response: 204 No Content 2018-07-21T05:33:48,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] response is [] 2018-07-21T05:33:48,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] starting 2018-07-21T05:33:48,840 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,840 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] Got response: 204 No Content 2018-07-21T05:33:48,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] response is [] 2018-07-21T05:33:48,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] starting 2018-07-21T05:33:48,841 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,841 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] Got response: 204 No Content 2018-07-21T05:33:48,841 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] response is [] 2018-07-21T05:33:48,841 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] starting 2018-07-21T05:33:48,842 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,842 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] Got response: 204 No Content 2018-07-21T05:33:48,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] 
response is [] 2018-07-21T05:33:48,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] starting 2018-07-21T05:33:48,842 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,842 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] Got response: 204 No Content 2018-07-21T05:33:48,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] response is [] 2018-07-21T05:33:48,842 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] starting 2018-07-21T05:33:48,843 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,843 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] Got response: 204 No Content 2018-07-21T05:33:48,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] response is [] 2018-07-21T05:33:48,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:33:48,843 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,843 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:33:48,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 2018-07-21T05:33:48,843 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:33:48,844 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,844 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:33:48,844 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 2018-07-21T05:33:48,844 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:33:48,845 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,845 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:33:48,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:33:48,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:33:48,845 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,845 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:33:48,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 2018-07-21T05:33:48,845 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:33:48,846 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,846 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:33:48,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 
2018-07-21T05:33:48,846 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting
2018-07-21T05:33:48,847 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,847 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content
2018-07-21T05:33:48,847 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is []
2018-07-21T05:33:48,847 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting
2018-07-21T05:33:48,847 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,847 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content
2018-07-21T05:33:48,847 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is []
2018-07-21T05:33:48,847 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting
2018-07-21T05:33:48,848 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,848 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content
2018-07-21T05:33:48,848 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is []
2018-07-21T05:33:48,848 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting
2018-07-21T05:33:48,849 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,849 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content
2018-07-21T05:33:48,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is []
2018-07-21T05:33:48,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting
2018-07-21T05:33:48,849 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,849 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content
2018-07-21T05:33:48,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is []
2018-07-21T05:33:48,849 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting
2018-07-21T05:33:48,850 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,850 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content
2018-07-21T05:33:48,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is []
2018-07-21T05:33:48,850 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting
2018-07-21T05:33:48,851 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,851 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content
2018-07-21T05:33:48,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is []
2018-07-21T05:33:48,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting
2018-07-21T05:33:48,851 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,851 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content
2018-07-21T05:33:48,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is []
2018-07-21T05:33:48,851 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting
2018-07-21T05:33:48,852 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,852 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content
2018-07-21T05:33:48,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is []
2018-07-21T05:33:48,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting
2018-07-21T05:33:48,852 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,852 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content
2018-07-21T05:33:48,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is []
2018-07-21T05:33:48,852 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting
2018-07-21T05:33:48,853 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,853 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content
2018-07-21T05:33:48,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is []
2018-07-21T05:33:48,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting
2018-07-21T05:33:48,854 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,854 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content
2018-07-21T05:33:48,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is []
2018-07-21T05:33:48,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting
2018-07-21T05:33:48,854 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,854 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content
2018-07-21T05:33:48,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is []
2018-07-21T05:33:48,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting
2018-07-21T05:33:48,855 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,855 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content
2018-07-21T05:33:48,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is []
2018-07-21T05:33:48,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] starting
2018-07-21T05:33:48,855 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,855 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] Got response: 204 No Content
2018-07-21T05:33:48,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] response is []
2018-07-21T05:33:48,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] starting
2018-07-21T05:33:48,856 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,856 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] Got response: 204 No Content
2018-07-21T05:33:48,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] response is []
2018-07-21T05:33:48,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] starting
2018-07-21T05:33:48,857 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,857 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] Got response: 204 No Content
2018-07-21T05:33:48,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] response is []
2018-07-21T05:33:48,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] starting
2018-07-21T05:33:48,857 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,857 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] Got response: 204 No Content
2018-07-21T05:33:48,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] response is []
2018-07-21T05:33:48,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] starting
2018-07-21T05:33:48,858 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,858 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] Got response: 204 No Content
2018-07-21T05:33:48,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] response is []
2018-07-21T05:33:48,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] starting
2018-07-21T05:33:48,858 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,858 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] Got response: 204 No Content
2018-07-21T05:33:48,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] response is []
2018-07-21T05:33:48,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] starting
2018-07-21T05:33:48,858 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,858 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] Got response: 204 No Content
2018-07-21T05:33:48,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] response is []
2018-07-21T05:33:48,858 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] starting
2018-07-21T05:33:48,859 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,859 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] Got response: 204 No Content
2018-07-21T05:33:48,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] response is []
2018-07-21T05:33:48,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] starting
2018-07-21T05:33:48,859 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,859 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] Got response: 204 No Content
2018-07-21T05:33:48,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] response is []
2018-07-21T05:33:48,859 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] starting
2018-07-21T05:33:48,860 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,860 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] Got response: 204 No Content
2018-07-21T05:33:48,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] response is []
2018-07-21T05:33:48,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting
2018-07-21T05:33:48,860 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,860 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content
2018-07-21T05:33:48,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is []
2018-07-21T05:33:48,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting
2018-07-21T05:33:48,861 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,861 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content
2018-07-21T05:33:48,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is []
2018-07-21T05:33:48,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting
2018-07-21T05:33:48,862 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,862 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content
2018-07-21T05:33:48,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is []
2018-07-21T05:33:48,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting
2018-07-21T05:33:48,862 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,862 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content
2018-07-21T05:33:48,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is []
2018-07-21T05:33:48,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting
2018-07-21T05:33:48,863 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,863 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content
2018-07-21T05:33:48,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is []
2018-07-21T05:33:48,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting
2018-07-21T05:33:48,864 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,864 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content
2018-07-21T05:33:48,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is []
2018-07-21T05:33:48,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting
2018-07-21T05:33:48,864 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,864 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content
2018-07-21T05:33:48,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is []
2018-07-21T05:33:48,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting
2018-07-21T05:33:48,865 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,865 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content
2018-07-21T05:33:48,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is []
2018-07-21T05:33:48,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting
2018-07-21T05:33:48,866 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,866 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content
2018-07-21T05:33:48,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is []
2018-07-21T05:33:48,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting
2018-07-21T05:33:48,866 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,866 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content
2018-07-21T05:33:48,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is []
2018-07-21T05:33:48,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting
2018-07-21T05:33:48,867 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,867 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content
2018-07-21T05:33:48,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is []
2018-07-21T05:33:48,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting
2018-07-21T05:33:48,867 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,868 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content
2018-07-21T05:33:48,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is []
2018-07-21T05:33:48,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting
2018-07-21T05:33:48,868 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,868 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content
2018-07-21T05:33:48,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is []
2018-07-21T05:33:48,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting
2018-07-21T05:33:48,869 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,869 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content
2018-07-21T05:33:48,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is []
2018-07-21T05:33:48,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting
2018-07-21T05:33:48,869 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,869 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content
2018-07-21T05:33:48,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is []
2018-07-21T05:33:48,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting
2018-07-21T05:33:48,870 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,870 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content
2018-07-21T05:33:48,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is []
2018-07-21T05:33:48,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting
2018-07-21T05:33:48,870 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,870 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content
2018-07-21T05:33:48,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is []
2018-07-21T05:33:48,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting
2018-07-21T05:33:48,871 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,871 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content
2018-07-21T05:33:48,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is []
2018-07-21T05:33:48,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting
2018-07-21T05:33:48,871 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,871 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content
2018-07-21T05:33:48,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is []
2018-07-21T05:33:48,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting
2018-07-21T05:33:48,872 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,872 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content
2018-07-21T05:33:48,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is []
2018-07-21T05:33:48,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting
2018-07-21T05:33:48,873 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,873 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content
2018-07-21T05:33:48,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is []
2018-07-21T05:33:48,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting
2018-07-21T05:33:48,873 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,873 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content
2018-07-21T05:33:48,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is []
2018-07-21T05:33:48,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting
2018-07-21T05:33:48,874 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,874 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content
2018-07-21T05:33:48,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is []
2018-07-21T05:33:48,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting
2018-07-21T05:33:48,874 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,875 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content
2018-07-21T05:33:48,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is []
2018-07-21T05:33:48,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting
2018-07-21T05:33:48,875 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:33:48,875 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content
2018-07-21T05:33:48,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is []
2018-07-21T05:33:48,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting 2018-07-21T05:33:48,876 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,876 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content 2018-07-21T05:33:48,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is [] 2018-07-21T05:33:48,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting 2018-07-21T05:33:48,876 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,876 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content 2018-07-21T05:33:48,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is [] 2018-07-21T05:33:48,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting 2018-07-21T05:33:48,877 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,877 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content 2018-07-21T05:33:48,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is [] 2018-07-21T05:33:48,877 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting 2018-07-21T05:33:48,878 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,878 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content 2018-07-21T05:33:48,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is [] 2018-07-21T05:33:48,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting 2018-07-21T05:33:48,878 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,878 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content 2018-07-21T05:33:48,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is [] 2018-07-21T05:33:48,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting 2018-07-21T05:33:48,879 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,879 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content 2018-07-21T05:33:48,879 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is [] 2018-07-21T05:33:48,879 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting 2018-07-21T05:33:48,880 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,880 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content 2018-07-21T05:33:48,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is [] 
2018-07-21T05:33:48,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting 2018-07-21T05:33:48,881 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,881 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content 2018-07-21T05:33:48,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is [] 2018-07-21T05:33:48,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting 2018-07-21T05:33:48,881 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,881 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content 2018-07-21T05:33:48,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is [] 2018-07-21T05:33:48,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting 2018-07-21T05:33:48,882 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,882 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content 2018-07-21T05:33:48,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is [] 2018-07-21T05:33:48,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting 2018-07-21T05:33:48,882 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,882 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content 2018-07-21T05:33:48,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is [] 2018-07-21T05:33:48,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting 2018-07-21T05:33:48,883 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,883 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content 2018-07-21T05:33:48,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is [] 2018-07-21T05:33:48,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting 2018-07-21T05:33:48,883 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,883 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content 2018-07-21T05:33:48,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is [] 2018-07-21T05:33:48,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting 2018-07-21T05:33:48,884 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,884 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content 2018-07-21T05:33:48,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is [] 
2018-07-21T05:33:48,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting 2018-07-21T05:33:48,885 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:33:48 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:33:48,885 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content 2018-07-21T05:33:48,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is [] 2018-07-21T05:33:49,239 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:33:49,301 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:34:18,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting 2018-07-21T05:34:18,887 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,887 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content 2018-07-21T05:34:18,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is [] 2018-07-21T05:34:18,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting 2018-07-21T05:34:18,888 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,888 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content 2018-07-21T05:34:18,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is [] 2018-07-21T05:34:18,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting 2018-07-21T05:34:18,889 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,889 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content 2018-07-21T05:34:18,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is [] 2018-07-21T05:34:18,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting 2018-07-21T05:34:18,890 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No 
Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,890 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content 2018-07-21T05:34:18,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is [] 2018-07-21T05:34:18,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting 2018-07-21T05:34:18,890 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,890 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content 2018-07-21T05:34:18,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is [] 2018-07-21T05:34:18,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting 2018-07-21T05:34:18,891 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,891 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content 2018-07-21T05:34:18,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is [] 2018-07-21T05:34:18,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting 2018-07-21T05:34:18,892 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,892 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content 2018-07-21T05:34:18,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is [] 2018-07-21T05:34:18,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting 2018-07-21T05:34:18,893 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,893 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content 2018-07-21T05:34:18,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is [] 2018-07-21T05:34:18,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting 2018-07-21T05:34:18,893 DEBUG 
[HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,893 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content 2018-07-21T05:34:18,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is [] 2018-07-21T05:34:18,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] starting 2018-07-21T05:34:18,894 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,894 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] Got response: 204 No Content 2018-07-21T05:34:18,894 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] response is [] 2018-07-21T05:34:18,894 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] starting 2018-07-21T05:34:18,895 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,895 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] Got response: 204 No Content 2018-07-21T05:34:18,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] response is [] 2018-07-21T05:34:18,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting 2018-07-21T05:34:18,895 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,895 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content 2018-07-21T05:34:18,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is [] 2018-07-21T05:34:18,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] starting 2018-07-21T05:34:18,896 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,896 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] Got response: 204 No Content 2018-07-21T05:34:18,896 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] response 
is [] 2018-07-21T05:34:18,896 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting 2018-07-21T05:34:18,897 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,897 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content 2018-07-21T05:34:18,897 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is [] 2018-07-21T05:34:18,897 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting 2018-07-21T05:34:18,898 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,898 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content 2018-07-21T05:34:18,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response is [] 2018-07-21T05:34:18,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] starting 2018-07-21T05:34:18,898 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,898 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] Got response: 204 No Content 2018-07-21T05:34:18,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] response is [] 2018-07-21T05:34:18,898 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] starting 2018-07-21T05:34:18,899 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,899 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] Got response: 204 No Content 2018-07-21T05:34:18,899 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] response is [] 2018-07-21T05:34:18,899 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting 2018-07-21T05:34:18,900 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,900 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content 2018-07-21T05:34:18,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is [] 2018-07-21T05:34:18,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting 2018-07-21T05:34:18,905 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,905 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content 2018-07-21T05:34:18,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is [] 2018-07-21T05:34:18,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] starting 2018-07-21T05:34:18,906 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,906 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] Got response: 204 No Content 2018-07-21T05:34:18,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] response is 
[] 2018-07-21T05:34:18,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] starting 2018-07-21T05:34:18,907 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,907 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] Got response: 204 No Content 2018-07-21T05:34:18,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] response is [] 2018-07-21T05:34:18,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting 2018-07-21T05:34:18,908 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,908 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content 2018-07-21T05:34:18,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is [] 2018-07-21T05:34:18,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting 2018-07-21T05:34:18,909 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,909 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content 2018-07-21T05:34:18,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is [] 2018-07-21T05:34:18,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] starting 2018-07-21T05:34:18,910 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,910 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] Got response: 204 No Content 2018-07-21T05:34:18,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] response is [] 2018-07-21T05:34:18,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] starting 2018-07-21T05:34:18,911 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,911 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] Got response: 204 No Content 2018-07-21T05:34:18,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] response is [] 2018-07-21T05:34:18,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] starting 2018-07-21T05:34:18,912 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,912 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] Got response: 204 No Content 2018-07-21T05:34:18,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] response is [] 2018-07-21T05:34:18,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:34:18,912 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,912 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:34:18,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 
2018-07-21T05:34:18,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:34:18,913 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,913 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:34:18,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 2018-07-21T05:34:18,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:34:18,914 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,914 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:34:18,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:34:18,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] starting 2018-07-21T05:34:18,915 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,915 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] Got response: 204 No Content 2018-07-21T05:34:18,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] response is [] 2018-07-21T05:34:18,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] starting 2018-07-21T05:34:18,916 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,916 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] Got response: 204 No Content 2018-07-21T05:34:18,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] response is [] 2018-07-21T05:34:18,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:34:18,917 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,917 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:34:18,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:34:18,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:34:18,917 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,917 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:34:18,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:34:18,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] starting 2018-07-21T05:34:18,918 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,918 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] Got response: 204 No Content 2018-07-21T05:34:18,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] response is [] 
2018-07-21T05:34:18,918 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] starting 2018-07-21T05:34:18,919 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,919 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] Got response: 204 No Content 2018-07-21T05:34:18,919 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] response is [] 2018-07-21T05:34:18,919 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] starting 2018-07-21T05:34:18,920 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,920 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] Got response: 204 No Content 2018-07-21T05:34:18,920 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] response is [] 2018-07-21T05:34:18,920 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:34:18,921 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,921 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:34:18,921 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 2018-07-21T05:34:18,921 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting 2018-07-21T05:34:18,921 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,921 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content 2018-07-21T05:34:18,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is [] 2018-07-21T05:34:18,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] starting 2018-07-21T05:34:18,922 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,922 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] Got response: 204 No Content 2018-07-21T05:34:18,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] response is [] 2018-07-21T05:34:18,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] starting 2018-07-21T05:34:18,923 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,923 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] Got response: 204 No Content 2018-07-21T05:34:18,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] response is [] 2018-07-21T05:34:18,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] starting 2018-07-21T05:34:18,924 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,924 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] Got response: 204 No Content 2018-07-21T05:34:18,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] response 
is [] 2018-07-21T05:34:18,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] starting 2018-07-21T05:34:18,925 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,925 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] Got response: 204 No Content 2018-07-21T05:34:18,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] response is [] 2018-07-21T05:34:18,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] starting 2018-07-21T05:34:18,925 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,925 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] Got response: 204 No Content 2018-07-21T05:34:18,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] response is [] 2018-07-21T05:34:18,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] starting 2018-07-21T05:34:18,926 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,926 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] Got response: 204 No Content 2018-07-21T05:34:18,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] response is [] 2018-07-21T05:34:18,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] starting 2018-07-21T05:34:18,927 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,927 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] Got response: 204 No Content 2018-07-21T05:34:18,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] response is [] 2018-07-21T05:34:18,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] starting 2018-07-21T05:34:18,927 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,927 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] Got response: 204 No Content 2018-07-21T05:34:18,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] response is [] 2018-07-21T05:34:18,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] starting 2018-07-21T05:34:18,928 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,928 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] Got response: 204 No Content 2018-07-21T05:34:18,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] response is [] 2018-07-21T05:34:18,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:34:18,929 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,929 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:34:18,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is 
[] 2018-07-21T05:34:18,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] starting 2018-07-21T05:34:18,930 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,930 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] Got response: 204 No Content 2018-07-21T05:34:18,930 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] response is [] 2018-07-21T05:34:18,930 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:34:18,931 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,931 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:34:18,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:34:18,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] starting 2018-07-21T05:34:18,931 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,931 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] Got response: 204 No Content 2018-07-21T05:34:18,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] response is [] 2018-07-21T05:34:18,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] starting 2018-07-21T05:34:18,932 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,932 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] Got response: 204 No Content 2018-07-21T05:34:18,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] response is [] 2018-07-21T05:34:18,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:34:18,933 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,933 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:34:18,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 2018-07-21T05:34:18,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:34:18,934 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,934 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:34:18,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is [] 2018-07-21T05:34:18,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:34:18,934 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,934 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:34:18,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is 
[]
2018-07-21T05:34:18,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] starting
2018-07-21T05:34:18,938 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,938 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] Got response: 204 No Content
2018-07-21T05:34:18,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] response is []
2018-07-21T05:34:18,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting
2018-07-21T05:34:18,939 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,939 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content
2018-07-21T05:34:18,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is []
2018-07-21T05:34:18,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting
2018-07-21T05:34:18,939 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,939 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content
2018-07-21T05:34:18,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is []
2018-07-21T05:34:18,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting
2018-07-21T05:34:18,940 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,940 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content
2018-07-21T05:34:18,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is []
2018-07-21T05:34:18,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting
2018-07-21T05:34:18,942 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,942 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content
2018-07-21T05:34:18,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is []
2018-07-21T05:34:18,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting
2018-07-21T05:34:18,942 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,942 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content
2018-07-21T05:34:18,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is []
2018-07-21T05:34:18,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting
2018-07-21T05:34:18,943 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,943 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content
2018-07-21T05:34:18,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is []
2018-07-21T05:34:18,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting
2018-07-21T05:34:18,944 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,944 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content
2018-07-21T05:34:18,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is []
2018-07-21T05:34:18,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting
2018-07-21T05:34:18,945 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,945 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content
2018-07-21T05:34:18,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is []
2018-07-21T05:34:18,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] starting
2018-07-21T05:34:18,945 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,945 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] Got response: 204 No Content
2018-07-21T05:34:18,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] response is []
2018-07-21T05:34:18,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting
2018-07-21T05:34:18,946 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,946 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content
2018-07-21T05:34:18,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is []
2018-07-21T05:34:18,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] starting
2018-07-21T05:34:18,947 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,947 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] Got response: 204 No Content
2018-07-21T05:34:18,947 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] response is []
2018-07-21T05:34:18,947 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting
2018-07-21T05:34:18,948 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,948 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content
2018-07-21T05:34:18,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is []
2018-07-21T05:34:18,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting
2018-07-21T05:34:18,948 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,949 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content
2018-07-21T05:34:18,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is []
2018-07-21T05:34:18,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] starting
2018-07-21T05:34:18,949 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,949 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] Got response: 204 No Content
2018-07-21T05:34:18,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] response is []
2018-07-21T05:34:18,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] starting
2018-07-21T05:34:18,950 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,950 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] Got response: 204 No Content
2018-07-21T05:34:18,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] response is []
2018-07-21T05:34:18,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting
2018-07-21T05:34:18,951 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,951 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content
2018-07-21T05:34:18,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is []
2018-07-21T05:34:18,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] starting
2018-07-21T05:34:18,952 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,952 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] Got response: 204 No Content
2018-07-21T05:34:18,952 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] response is []
2018-07-21T05:34:18,952 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] starting
2018-07-21T05:34:18,953 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,953 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] Got response: 204 No Content
2018-07-21T05:34:18,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] response is []
2018-07-21T05:34:18,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting
2018-07-21T05:34:18,953 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,953 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content
2018-07-21T05:34:18,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is []
2018-07-21T05:34:18,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting
2018-07-21T05:34:18,954 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,954 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content
2018-07-21T05:34:18,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is []
2018-07-21T05:34:18,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting
2018-07-21T05:34:18,955 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,955 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content
2018-07-21T05:34:18,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response is []
2018-07-21T05:34:18,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] starting
2018-07-21T05:34:18,955 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,955 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] Got response: 204 No Content
2018-07-21T05:34:18,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] response is []
2018-07-21T05:34:18,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] starting
2018-07-21T05:34:18,956 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,956 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] Got response: 204 No Content
2018-07-21T05:34:18,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] response is []
2018-07-21T05:34:18,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting
2018-07-21T05:34:18,957 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,957 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content
2018-07-21T05:34:18,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is []
2018-07-21T05:34:18,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting
2018-07-21T05:34:18,958 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,958 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content
2018-07-21T05:34:18,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is []
2018-07-21T05:34:18,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting
2018-07-21T05:34:18,959 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,959 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content
2018-07-21T05:34:18,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is []
2018-07-21T05:34:18,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] starting
2018-07-21T05:34:18,960 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,960 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] Got response: 204 No Content
2018-07-21T05:34:18,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] response is []
2018-07-21T05:34:18,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] starting
2018-07-21T05:34:18,961 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,961 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] Got response: 204 No Content
2018-07-21T05:34:18,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] response is []
2018-07-21T05:34:18,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting
2018-07-21T05:34:18,962 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,962 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content
2018-07-21T05:34:18,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is []
2018-07-21T05:34:18,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting
2018-07-21T05:34:18,962 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,962 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content
2018-07-21T05:34:18,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is []
2018-07-21T05:34:18,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] starting
2018-07-21T05:34:18,963 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,963 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] Got response: 204 No Content
2018-07-21T05:34:18,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] response is []
2018-07-21T05:34:18,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] starting
2018-07-21T05:34:18,964 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,964 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] Got response: 204 No Content
2018-07-21T05:34:18,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] response is []
2018-07-21T05:34:18,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting
2018-07-21T05:34:18,964 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,964 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content
2018-07-21T05:34:18,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is []
2018-07-21T05:34:18,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting
2018-07-21T05:34:18,965 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,965 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content
2018-07-21T05:34:18,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is []
2018-07-21T05:34:18,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting
2018-07-21T05:34:18,966 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,966 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content
2018-07-21T05:34:18,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is []
2018-07-21T05:34:18,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] starting
2018-07-21T05:34:18,967 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,967 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] Got response: 204 No Content
2018-07-21T05:34:18,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] response is []
2018-07-21T05:34:18,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] starting
2018-07-21T05:34:18,967 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,967 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] Got response: 204 No Content
2018-07-21T05:34:18,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] response is []
2018-07-21T05:34:18,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting
2018-07-21T05:34:18,968 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,968 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content
2018-07-21T05:34:18,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is []
2018-07-21T05:34:18,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] starting
2018-07-21T05:34:18,969 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,969 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] Got response: 204 No Content
2018-07-21T05:34:18,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] response is []
2018-07-21T05:34:18,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] starting
2018-07-21T05:34:18,970 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,970 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] Got response: 204 No Content
2018-07-21T05:34:18,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] response is []
2018-07-21T05:34:18,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting
2018-07-21T05:34:18,970 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,970 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content
2018-07-21T05:34:18,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is []
2018-07-21T05:34:18,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] starting
2018-07-21T05:34:18,971 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,971 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] Got response: 204 No Content
2018-07-21T05:34:18,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] response is []
2018-07-21T05:34:18,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] starting
2018-07-21T05:34:18,972 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,972 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] Got response: 204 No Content
2018-07-21T05:34:18,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] response is []
2018-07-21T05:34:18,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting
2018-07-21T05:34:18,973 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,973 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content
2018-07-21T05:34:18,973 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is []
2018-07-21T05:34:18,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting
2018-07-21T05:34:18,974 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,974 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content
2018-07-21T05:34:18,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] response is []
2018-07-21T05:34:18,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] starting
2018-07-21T05:34:18,975 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,975 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] Got response: 204 No Content
2018-07-21T05:34:18,975 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] response is []
2018-07-21T05:34:18,975 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting
2018-07-21T05:34:18,976 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,976 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content
2018-07-21T05:34:18,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is []
2018-07-21T05:34:18,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] starting
2018-07-21T05:34:18,976 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:18,976 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] Got response: 204 No Content
2018-07-21T05:34:18,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] response
is [] 2018-07-21T05:34:18,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] starting 2018-07-21T05:34:18,977 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,977 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] Got response: 204 No Content 2018-07-21T05:34:18,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] response is [] 2018-07-21T05:34:18,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting 2018-07-21T05:34:18,977 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,977 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content 2018-07-21T05:34:18,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is [] 2018-07-21T05:34:18,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:34:18,978 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,978 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:34:18,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is [] 2018-07-21T05:34:18,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] starting 2018-07-21T05:34:18,979 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,979 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] Got response: 204 No Content 2018-07-21T05:34:18,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] response is [] 2018-07-21T05:34:18,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting 2018-07-21T05:34:18,979 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,979 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content 2018-07-21T05:34:18,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is [] 2018-07-21T05:34:18,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:34:18,980 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,980 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:34:18,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 2018-07-21T05:34:18,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] starting 2018-07-21T05:34:18,981 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,981 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] Got response: 204 No Content 2018-07-21T05:34:18,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] response 
is [] 2018-07-21T05:34:18,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] starting 2018-07-21T05:34:18,982 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,982 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] Got response: 204 No Content 2018-07-21T05:34:18,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] response is [] 2018-07-21T05:34:18,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting 2018-07-21T05:34:18,982 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,982 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content 2018-07-21T05:34:18,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is [] 2018-07-21T05:34:18,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting 2018-07-21T05:34:18,983 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,983 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content 2018-07-21T05:34:18,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is [] 2018-07-21T05:34:18,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting 2018-07-21T05:34:18,984 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,984 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content 2018-07-21T05:34:18,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is [] 2018-07-21T05:34:18,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting 2018-07-21T05:34:18,984 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,984 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content 2018-07-21T05:34:18,984 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is [] 2018-07-21T05:34:18,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting 2018-07-21T05:34:18,985 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,985 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content 2018-07-21T05:34:18,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is [] 2018-07-21T05:34:18,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting 2018-07-21T05:34:18,986 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,986 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content 2018-07-21T05:34:18,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is [] 
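The exchange that repeats throughout this stretch — a GET against the coordinator's per-segment endpoint, a 204 No Content reply, and an empty body logged as "response is []" — is the storage handler polling the Druid coordinator until it reports each freshly pushed segment as available. Below is a minimal sketch of one such check, assuming only what the log itself shows (the URL shape and the 204/empty-body convention); the class and method names are hypothetical, not the actual DruidStorageHandler code:

```java
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Hypothetical sketch of the polling exchange visible in the log above.
public class SegmentLoadCheck {

    // e.g. base = "http://localhost:8081",
    //      dataSource = "default.druid_max_size_partition",
    //      segmentId  = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_..._257"
    static boolean isSegmentLoaded(String base, String dataSource, String segmentId)
            throws IOException {
        URL url = new URL(base + "/druid/coordinator/v1/datasources/"
                + dataSource + "/segments/" + segmentId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try {
            int code = conn.getResponseCode();
            // The coordinator answers 204 No Content (empty body, logged as
            // "response is []") until it knows about the segment; the handler
            // treats that as "not loaded yet" and polls again.
            if (code == HttpURLConnection.HTTP_NO_CONTENT) {
                return false;
            }
            try (InputStream in = conn.getInputStream()) {
                String body = new String(in.readAllBytes(), StandardCharsets.UTF_8);
                return !body.isEmpty(); // segment metadata present => known to coordinator
            }
        } finally {
            conn.disconnect();
        }
    }
}
```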
2018-07-21T05:34:18,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting 2018-07-21T05:34:18,987 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,987 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content 2018-07-21T05:34:18,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is [] 2018-07-21T05:34:18,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting 2018-07-21T05:34:18,987 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,987 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content 2018-07-21T05:34:18,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is [] 2018-07-21T05:34:18,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting 2018-07-21T05:34:18,988 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,988 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content 2018-07-21T05:34:18,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is [] 2018-07-21T05:34:18,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting 2018-07-21T05:34:18,989 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,989 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content 2018-07-21T05:34:18,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is [] 2018-07-21T05:34:18,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] starting 2018-07-21T05:34:18,989 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,989 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] Got response: 204 No Content 2018-07-21T05:34:18,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] response is [] 2018-07-21T05:34:18,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting 2018-07-21T05:34:18,990 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,990 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content 2018-07-21T05:34:18,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is [] 2018-07-21T05:34:18,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting 2018-07-21T05:34:18,991 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,991 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content 2018-07-21T05:34:18,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is [] 
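Each segment identifier in these URLs packs several fields into one underscore-delimited string: the datasource (default.druid_max_size_partition), the UTC interval start and end, the version timestamp assigned at publish time (2018-07-21T05:31:59.547-07:00), and a trailing shard number — which is why the same hour interval shows up with many different suffixes (_74, _135, _257, _480, ...). An illustrative decomposition follows, parsed from the right because the datasource itself contains underscores; this is a hypothetical helper, not Druid's own segment-id parser:

```java
// Illustrative decomposition of the segment identifiers seen in the URLs above.
// Field order (dataSource_start_end_version_partitionNum) matches the ids in
// this log. (In general Druid, shard 0 may omit the trailing number; every id
// in this log carries one.)
public class SegmentIdParts {
    final String dataSource, intervalStart, intervalEnd, version;
    final int partitionNum;

    SegmentIdParts(String id) {
        // The datasource may itself contain '_', so take fields from the right.
        String[] t = id.split("_");
        int n = t.length;
        partitionNum  = Integer.parseInt(t[n - 1]); // e.g. 257
        version       = t[n - 2];                   // e.g. 2018-07-21T05:31:59.547-07:00
        intervalEnd   = t[n - 3];                   // e.g. 1970-01-01T01:00:00.000Z
        intervalStart = t[n - 4];                   // e.g. 1970-01-01T00:00:00.000Z
        dataSource    = String.join("_",
                java.util.Arrays.copyOfRange(t, 0, n - 4)); // default.druid_max_size_partition
    }
}
```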
2018-07-21T05:34:18,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] starting 2018-07-21T05:34:18,991 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,991 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] Got response: 204 No Content 2018-07-21T05:34:18,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] response is [] 2018-07-21T05:34:18,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] starting 2018-07-21T05:34:18,992 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,992 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] Got response: 204 No Content 2018-07-21T05:34:18,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] response is [] 2018-07-21T05:34:18,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] starting 2018-07-21T05:34:18,993 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,993 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] Got response: 204 No Content 2018-07-21T05:34:18,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] response is [] 2018-07-21T05:34:18,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting 2018-07-21T05:34:18,994 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,994 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content 2018-07-21T05:34:18,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is [] 2018-07-21T05:34:18,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] starting 2018-07-21T05:34:18,994 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,994 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] Got response: 204 No Content 2018-07-21T05:34:18,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] response is [] 2018-07-21T05:34:18,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] starting 2018-07-21T05:34:18,995 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,995 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] Got response: 204 No Content 2018-07-21T05:34:18,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] response is [] 2018-07-21T05:34:18,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting 2018-07-21T05:34:18,998 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,998 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content 2018-07-21T05:34:18,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response 
is [] 2018-07-21T05:34:18,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:34:18,999 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:18 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:18,999 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:34:18,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 2018-07-21T05:34:18,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting 2018-07-21T05:34:19,000 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,000 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content 2018-07-21T05:34:19,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is [] 2018-07-21T05:34:19,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] starting 2018-07-21T05:34:19,001 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,001 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] Got response: 204 No Content 2018-07-21T05:34:19,001 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] response is [] 2018-07-21T05:34:19,001 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] starting 2018-07-21T05:34:19,002 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,002 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] Got response: 204 No Content 2018-07-21T05:34:19,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] response is [] 2018-07-21T05:34:19,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] starting 2018-07-21T05:34:19,003 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,003 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] Got response: 204 No Content 2018-07-21T05:34:19,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] response is [] 2018-07-21T05:34:19,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting 2018-07-21T05:34:19,003 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,003 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content 2018-07-21T05:34:19,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is [] 2018-07-21T05:34:19,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:34:19,004 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,004 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:34:19,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 
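Only two distinct intervals appear across all of these checks — 1969-12-31T23:00:00.000Z/1970-01-01T00:00:00.000Z and 1970-01-01T00:00:00.000Z/1970-01-01T01:00:00.000Z — which is consistent with hourly segment granularity: each row's timestamp is floored to a UTC hour bucket, and rows with epoch millis just below zero land in the 23:00 bucket of 1969-12-31. A small sketch of that bucketing, assuming HOUR granularity was used for this table (the helper below is illustrative, not Druid code):

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;

// Sketch of the hourly bucketing implied by the two intervals in the log.
public class HourBucket {
    static Instant bucketStart(long epochMillis) {
        return Instant.ofEpochMilli(epochMillis)
                      .atOffset(ZoneOffset.UTC)       // Druid intervals are UTC
                      .truncatedTo(ChronoUnit.HOURS)  // floor to the hour boundary
                      .toInstant();
    }

    public static void main(String[] args) {
        System.out.println(bucketStart(0L));          // 1970-01-01T00:00:00Z
        System.out.println(bucketStart(-1L));         // 1969-12-31T23:00:00Z
        System.out.println(bucketStart(3_599_999L));  // 1970-01-01T00:00:00Z
    }
}
```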
2018-07-21T05:34:19,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting 2018-07-21T05:34:19,005 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,005 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content 2018-07-21T05:34:19,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is [] 2018-07-21T05:34:19,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] starting 2018-07-21T05:34:19,006 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,006 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] Got response: 204 No Content 2018-07-21T05:34:19,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] response is [] 2018-07-21T05:34:19,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] starting 2018-07-21T05:34:19,006 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,006 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] Got response: 204 No Content 2018-07-21T05:34:19,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] response is [] 2018-07-21T05:34:19,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] starting 2018-07-21T05:34:19,007 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,007 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] Got response: 204 No Content 2018-07-21T05:34:19,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] response is [] 2018-07-21T05:34:19,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting 2018-07-21T05:34:19,008 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,008 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content 2018-07-21T05:34:19,008 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is [] 2018-07-21T05:34:19,008 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting 2018-07-21T05:34:19,009 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,009 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content 2018-07-21T05:34:19,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is [] 2018-07-21T05:34:19,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] starting 2018-07-21T05:34:19,010 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,010 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] Got response: 204 No Content 2018-07-21T05:34:19,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] response is 
[] 2018-07-21T05:34:19,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] starting 2018-07-21T05:34:19,011 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,011 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] Got response: 204 No Content 2018-07-21T05:34:19,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] response is [] 2018-07-21T05:34:19,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] starting 2018-07-21T05:34:19,012 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,012 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] Got response: 204 No Content 2018-07-21T05:34:19,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] response is [] 2018-07-21T05:34:19,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting 2018-07-21T05:34:19,012 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,012 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content 2018-07-21T05:34:19,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is [] 2018-07-21T05:34:19,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting 2018-07-21T05:34:19,013 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,013 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content 2018-07-21T05:34:19,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is [] 2018-07-21T05:34:19,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] starting 2018-07-21T05:34:19,014 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,014 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] Got response: 204 No Content 2018-07-21T05:34:19,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] response is [] 2018-07-21T05:34:19,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] starting 2018-07-21T05:34:19,015 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,015 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] Got response: 204 No Content 2018-07-21T05:34:19,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] response is [] 2018-07-21T05:34:19,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] starting 2018-07-21T05:34:19,016 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,016 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] Got response: 204 No Content 2018-07-21T05:34:19,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] 
response is [] 2018-07-21T05:34:19,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting 2018-07-21T05:34:19,017 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,017 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content 2018-07-21T05:34:19,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is [] 2018-07-21T05:34:19,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] starting 2018-07-21T05:34:19,018 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,018 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] Got response: 204 No Content 2018-07-21T05:34:19,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] response is [] 2018-07-21T05:34:19,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] starting 2018-07-21T05:34:19,019 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,019 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] Got response: 204 No Content 2018-07-21T05:34:19,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] response is [] 2018-07-21T05:34:19,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] starting 2018-07-21T05:34:19,020 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,020 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] Got response: 204 No Content 2018-07-21T05:34:19,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] response is [] 2018-07-21T05:34:19,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting 2018-07-21T05:34:19,021 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,021 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content 2018-07-21T05:34:19,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is [] 2018-07-21T05:34:19,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] starting 2018-07-21T05:34:19,022 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,022 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] Got response: 204 No Content 2018-07-21T05:34:19,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] response is [] 2018-07-21T05:34:19,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] starting 2018-07-21T05:34:19,039 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,039 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] Got response: 204 No Content 2018-07-21T05:34:19,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] response 
is [] 2018-07-21T05:34:19,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] starting 2018-07-21T05:34:19,045 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,045 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] Got response: 204 No Content 2018-07-21T05:34:19,045 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] response is [] 2018-07-21T05:34:19,045 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting 2018-07-21T05:34:19,051 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,051 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content 2018-07-21T05:34:19,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is [] 2018-07-21T05:34:19,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] starting 2018-07-21T05:34:19,052 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,052 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] Got response: 204 No Content 2018-07-21T05:34:19,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] response is [] 2018-07-21T05:34:19,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] starting 2018-07-21T05:34:19,065 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,065 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] Got response: 204 No Content 2018-07-21T05:34:19,065 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] response is [] 2018-07-21T05:34:19,065 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting 2018-07-21T05:34:19,072 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,072 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content 2018-07-21T05:34:19,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is [] 2018-07-21T05:34:19,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] starting 2018-07-21T05:34:19,077 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,077 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] Got response: 204 No Content 2018-07-21T05:34:19,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] response is [] 2018-07-21T05:34:19,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] starting 2018-07-21T05:34:19,078 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,078 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] Got response: 204 No Content 2018-07-21T05:34:19,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] response 
is [] 2018-07-21T05:34:19,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] starting 2018-07-21T05:34:19,079 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,079 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] Got response: 204 No Content 2018-07-21T05:34:19,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] response is [] 2018-07-21T05:34:19,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:34:19,080 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,080 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:34:19,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 2018-07-21T05:34:19,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting 2018-07-21T05:34:19,081 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,084 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content 2018-07-21T05:34:19,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is [] 2018-07-21T05:34:19,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] starting 2018-07-21T05:34:19,085 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,085 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] Got response: 204 No Content 2018-07-21T05:34:19,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] response is [] 2018-07-21T05:34:19,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] starting 2018-07-21T05:34:19,086 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,086 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] Got response: 204 No Content 2018-07-21T05:34:19,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] response is [] 2018-07-21T05:34:19,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:34:19,087 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,087 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:34:19,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is [] 2018-07-21T05:34:19,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting 2018-07-21T05:34:19,088 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,088 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content 2018-07-21T05:34:19,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is 
[] 2018-07-21T05:34:19,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] starting 2018-07-21T05:34:19,089 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,089 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] Got response: 204 No Content 2018-07-21T05:34:19,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] response is [] 2018-07-21T05:34:19,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] starting 2018-07-21T05:34:19,090 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,090 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] Got response: 204 No Content 2018-07-21T05:34:19,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] response is [] 2018-07-21T05:34:19,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] starting 2018-07-21T05:34:19,091 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,091 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] Got response: 204 No Content 2018-07-21T05:34:19,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] response is [] 2018-07-21T05:34:19,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting 2018-07-21T05:34:19,092 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,092 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content 2018-07-21T05:34:19,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is [] 2018-07-21T05:34:19,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting 2018-07-21T05:34:19,092 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,093 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content 2018-07-21T05:34:19,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is [] 2018-07-21T05:34:19,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting 2018-07-21T05:34:19,101 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,101 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content 2018-07-21T05:34:19,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is [] 2018-07-21T05:34:19,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting 2018-07-21T05:34:19,102 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,102 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content 2018-07-21T05:34:19,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is [] 
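The four-entry pattern repeating above — a GET marked "starting", a "messageReceived" carrying HTTP/1.1 204 No Content, a "Got response: 204 No Content", and a DruidStorageHandler line reporting "response is []" — traces Hive checking, shard by shard, whether the Druid coordinator has picked up the freshly pushed segments of default.druid_max_size_partition. The log suggests that a 204/empty body means the coordinator does not yet report the segment, so the handler records it as not loaded and moves on. Below is a minimal sketch of that kind of check, using java.net.http for brevity rather than the async Netty client these entries come from; the class and method names (SegmentCheck, isSegmentLoaded) are illustrative, not Hive's actual API.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SegmentCheck {
        private static final HttpClient CLIENT = HttpClient.newHttpClient();

        // Ask the coordinator for one segment's metadata via the endpoint
        // seen in the requests above. An empty reply (logged above as
        // "response is []") is read as "not loaded yet".
        static boolean isSegmentLoaded(String coordinator, String dataSource,
                                       String segmentId) throws Exception {
            URI uri = URI.create("http://" + coordinator
                    + "/druid/coordinator/v1/datasources/" + dataSource
                    + "/segments/" + segmentId);
            HttpResponse<String> resp = CLIENT.send(
                    HttpRequest.newBuilder(uri).GET().build(),
                    HttpResponse.BodyHandlers.ofString());
            return resp.statusCode() == 200 && !resp.body().isEmpty();
        }

        public static void main(String[] args) throws Exception {
            // One of the segment ids queried in the entries above.
            System.out.println(isSegmentLoaded("localhost:8081",
                    "default.druid_max_size_partition",
                    "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227"));
        }
    }

Checking one segment per round trip keeps the logic simple but costs one coordinator request per shard, which is why this window of the log is a flood of near-identical GETs.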
2018-07-21T05:34:19,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] starting 2018-07-21T05:34:19,103 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,103 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] Got response: 204 No Content 2018-07-21T05:34:19,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] response is [] 2018-07-21T05:34:19,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting 2018-07-21T05:34:19,104 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,104 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content 2018-07-21T05:34:19,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is [] 2018-07-21T05:34:19,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:34:19,105 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,105 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:34:19,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:34:19,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting 2018-07-21T05:34:19,106 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,107 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content 2018-07-21T05:34:19,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is [] 2018-07-21T05:34:19,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting 2018-07-21T05:34:19,107 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,107 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content 2018-07-21T05:34:19,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is [] 2018-07-21T05:34:19,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:34:19,108 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,109 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:34:19,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:34:19,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:34:19,109 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,110 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:34:19,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 
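Every id being polled here has the same shape, which the URLs make easy to read off: data source, interval start, interval end, version timestamp, and a trailing shard number, joined by underscores. This run covers two hour-long intervals around the epoch (1969-12-31T23:00Z to 1970-01-01T00:00Z, and 1970-01-01T00:00Z to 01:00Z), a single version (2018-07-21T05:31:59.547-07:00), and shard numbers ranging from the tens into the hundreds, so consecutive requests differ only in their final component. A sketch of that composition, inferred from the URLs above rather than taken from Druid's own segment-id code:

    public class SegmentIds {
        // Compose a segment id the way the ones above are laid out:
        // dataSource_intervalStart_intervalEnd_version_shardNum.
        static String segmentId(String dataSource, String start, String end,
                                String version, int shardNum) {
            return String.join("_", dataSource, start, end, version,
                    Integer.toString(shardNum));
        }

        public static void main(String[] args) {
            // Reproduces the id queried in the entries just above.
            System.out.println(segmentId("default.druid_max_size_partition",
                    "1969-12-31T23:00:00.000Z", "1970-01-01T00:00:00.000Z",
                    "2018-07-21T05:31:59.547-07:00", 97));
        }
    }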
2018-07-21T05:34:19,110 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting 2018-07-21T05:34:19,110 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,111 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content 2018-07-21T05:34:19,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is [] 2018-07-21T05:34:19,111 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:34:19,112 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,112 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:34:19,112 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:34:19,112 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting 2018-07-21T05:34:19,113 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,113 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content 2018-07-21T05:34:19,113 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is [] 2018-07-21T05:34:19,113 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting 2018-07-21T05:34:19,114 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,114 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content 2018-07-21T05:34:19,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is [] 2018-07-21T05:34:19,114 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:34:19,114 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,115 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:34:19,115 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 2018-07-21T05:34:19,115 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] starting 2018-07-21T05:34:19,116 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,116 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] Got response: 204 No Content 2018-07-21T05:34:19,116 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] response is [] 2018-07-21T05:34:19,116 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] starting 2018-07-21T05:34:19,116 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,117 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] Got response: 204 No Content 2018-07-21T05:34:19,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] response is 
[] 2018-07-21T05:34:19,117 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting 2018-07-21T05:34:19,118 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,118 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content 2018-07-21T05:34:19,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is [] 2018-07-21T05:34:19,118 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] starting 2018-07-21T05:34:19,119 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,119 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] Got response: 204 No Content 2018-07-21T05:34:19,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] response is [] 2018-07-21T05:34:19,119 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting 2018-07-21T05:34:19,120 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,120 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content 2018-07-21T05:34:19,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is [] 2018-07-21T05:34:19,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting 2018-07-21T05:34:19,120 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,120 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content 2018-07-21T05:34:19,120 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is [] 2018-07-21T05:34:19,121 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] starting 2018-07-21T05:34:19,122 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,122 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] Got response: 204 No Content 2018-07-21T05:34:19,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] response is [] 2018-07-21T05:34:19,122 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] starting 2018-07-21T05:34:19,122 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,122 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] Got response: 204 No Content 2018-07-21T05:34:19,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] response is [] 2018-07-21T05:34:19,123 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] starting 2018-07-21T05:34:19,124 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,124 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] Got response: 204 No Content 2018-07-21T05:34:19,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] response 
is [] 2018-07-21T05:34:19,124 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting 2018-07-21T05:34:19,125 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,125 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content 2018-07-21T05:34:19,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is [] 2018-07-21T05:34:19,126 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] starting 2018-07-21T05:34:19,129 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,129 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] Got response: 204 No Content 2018-07-21T05:34:19,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] response is [] 2018-07-21T05:34:19,130 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] starting 2018-07-21T05:34:19,131 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,131 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] Got response: 204 No Content 2018-07-21T05:34:19,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] response is [] 2018-07-21T05:34:19,131 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] starting 2018-07-21T05:34:19,132 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,132 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] Got response: 204 No Content 2018-07-21T05:34:19,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] response is [] 2018-07-21T05:34:19,132 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting 2018-07-21T05:34:19,133 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,133 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content 2018-07-21T05:34:19,133 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is [] 2018-07-21T05:34:19,133 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting 2018-07-21T05:34:19,133 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,133 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content 2018-07-21T05:34:19,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is [] 2018-07-21T05:34:19,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] starting 2018-07-21T05:34:19,134 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,134 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] Got response: 204 No Content 2018-07-21T05:34:19,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] response 
is [] 2018-07-21T05:34:19,134 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] starting 2018-07-21T05:34:19,135 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,135 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] Got response: 204 No Content 2018-07-21T05:34:19,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] response is [] 2018-07-21T05:34:19,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] starting 2018-07-21T05:34:19,136 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,136 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] Got response: 204 No Content 2018-07-21T05:34:19,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] response is [] 2018-07-21T05:34:19,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting 2018-07-21T05:34:19,137 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,137 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content 2018-07-21T05:34:19,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is [] 2018-07-21T05:34:19,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting 2018-07-21T05:34:19,137 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,137 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content 2018-07-21T05:34:19,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is [] 2018-07-21T05:34:19,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] starting 2018-07-21T05:34:19,138 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,138 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] Got response: 204 No Content
2018-07-21T05:34:19,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] response is []
2018-07-21T05:34:19,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] starting
2018-07-21T05:34:19,139 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,139 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] Got response: 204 No Content
2018-07-21T05:34:19,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] response is []
2018-07-21T05:34:19,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] starting
2018-07-21T05:34:19,145 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,145 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] Got response: 204 No Content
2018-07-21T05:34:19,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] response is []
2018-07-21T05:34:19,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting
2018-07-21T05:34:19,146 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,146 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content
2018-07-21T05:34:19,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is []
2018-07-21T05:34:19,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] starting
2018-07-21T05:34:19,147 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,148 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] Got response: 204 No Content
2018-07-21T05:34:19,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] response is []
2018-07-21T05:34:19,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] starting
2018-07-21T05:34:19,148 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,148 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] Got response: 204 No Content
2018-07-21T05:34:19,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] response is []
2018-07-21T05:34:19,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] starting
2018-07-21T05:34:19,149 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,149 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] Got response: 204 No Content
2018-07-21T05:34:19,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] response is []
2018-07-21T05:34:19,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting
2018-07-21T05:34:19,150 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,150 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content
2018-07-21T05:34:19,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is []
2018-07-21T05:34:19,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] starting
2018-07-21T05:34:19,151 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,151 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] Got response: 204 No Content
2018-07-21T05:34:19,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] response is []
2018-07-21T05:34:19,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] starting
2018-07-21T05:34:19,151 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,151 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] Got response: 204 No Content
2018-07-21T05:34:19,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] response is []
2018-07-21T05:34:19,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] starting
2018-07-21T05:34:19,152 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,152 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] Got response: 204 No Content
2018-07-21T05:34:19,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] response is []
2018-07-21T05:34:19,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting
2018-07-21T05:34:19,153 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,153 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content
2018-07-21T05:34:19,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is []
2018-07-21T05:34:19,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] starting
2018-07-21T05:34:19,154 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,154 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] Got response: 204 No Content
2018-07-21T05:34:19,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] response is []
2018-07-21T05:34:19,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] starting
2018-07-21T05:34:19,154 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,154 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] Got response: 204 No Content
2018-07-21T05:34:19,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] response is []
2018-07-21T05:34:19,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] starting
2018-07-21T05:34:19,155 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,155 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] Got response: 204 No Content
2018-07-21T05:34:19,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] response is []
2018-07-21T05:34:19,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting
2018-07-21T05:34:19,156 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,156 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content
2018-07-21T05:34:19,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is []
2018-07-21T05:34:19,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] starting
2018-07-21T05:34:19,156 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,156 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] Got response: 204 No Content
2018-07-21T05:34:19,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] response is []
2018-07-21T05:34:19,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] starting
2018-07-21T05:34:19,157 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,157 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] Got response: 204 No Content
2018-07-21T05:34:19,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] response is []
2018-07-21T05:34:19,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] starting
2018-07-21T05:34:19,158 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,158 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] Got response: 204 No Content
2018-07-21T05:34:19,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] response is []
2018-07-21T05:34:19,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting
2018-07-21T05:34:19,159 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,159 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content
2018-07-21T05:34:19,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is []
2018-07-21T05:34:19,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting
2018-07-21T05:34:19,165 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,165 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content
2018-07-21T05:34:19,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is []
2018-07-21T05:34:19,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] starting
2018-07-21T05:34:19,166 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,166 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] Got response: 204 No Content
2018-07-21T05:34:19,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] response is []
2018-07-21T05:34:19,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] starting
2018-07-21T05:34:19,167 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,167 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] Got response: 204 No Content
2018-07-21T05:34:19,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] response is []
2018-07-21T05:34:19,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] starting
2018-07-21T05:34:19,168 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,168 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] Got response: 204 No Content
2018-07-21T05:34:19,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] response is []
2018-07-21T05:34:19,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] starting
2018-07-21T05:34:19,169 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,169 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] Got response: 204 No Content
2018-07-21T05:34:19,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] response is []
2018-07-21T05:34:19,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] starting
2018-07-21T05:34:19,170 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,170 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] Got response: 204 No Content
2018-07-21T05:34:19,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] response is []
2018-07-21T05:34:19,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] starting
2018-07-21T05:34:19,170 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,170 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] Got response: 204 No Content
2018-07-21T05:34:19,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] response is []
2018-07-21T05:34:19,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting
2018-07-21T05:34:19,175 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,175 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content
2018-07-21T05:34:19,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is []
2018-07-21T05:34:19,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] starting
2018-07-21T05:34:19,177 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,177 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] Got response: 204 No Content
2018-07-21T05:34:19,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] response is []
2018-07-21T05:34:19,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting
2018-07-21T05:34:19,178 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,179 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content
2018-07-21T05:34:19,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is []
2018-07-21T05:34:19,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] starting
2018-07-21T05:34:19,179 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,179 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] Got response: 204 No Content
2018-07-21T05:34:19,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] response is []
2018-07-21T05:34:19,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting
2018-07-21T05:34:19,180 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,180 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content
2018-07-21T05:34:19,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is []
2018-07-21T05:34:19,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] starting
2018-07-21T05:34:19,181 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,181 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] Got response: 204 No Content
2018-07-21T05:34:19,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] response is []
2018-07-21T05:34:19,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting
2018-07-21T05:34:19,181 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,181 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content
2018-07-21T05:34:19,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is []
2018-07-21T05:34:19,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] starting
2018-07-21T05:34:19,182 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,182 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] Got response: 204 No Content
2018-07-21T05:34:19,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] response is []
2018-07-21T05:34:19,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting
2018-07-21T05:34:19,183 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,183 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content
2018-07-21T05:34:19,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is []
2018-07-21T05:34:19,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting
2018-07-21T05:34:19,184 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,184 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content
2018-07-21T05:34:19,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is []
2018-07-21T05:34:19,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting
2018-07-21T05:34:19,184 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,184 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content
2018-07-21T05:34:19,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is []
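The loop visible in these entries is the DruidStorageHandler waiting for the Druid Coordinator to confirm that each freshly pushed shard of default.druid_max_size_partition is available: it GETs the coordinator's per-segment endpoint and, as long as the reply is 204 No Content with an empty body (logged as "response is []"), treats the segment as not yet loaded and re-checks it. A minimal sketch of that check, assuming only java.net.HttpURLConnection; the class, method names, and the 60-second budget below are illustrative, not Hive's actual implementation:

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class CoordinatorSegmentCheck {

    // segmentUrl follows the shape seen in this log, e.g.
    // http://localhost:8081/druid/coordinator/v1/datasources/<dataSource>/segments/<segmentId>
    static boolean isLoaded(String segmentUrl) throws IOException {
        HttpURLConnection conn = (HttpURLConnection) new URL(segmentUrl).openConnection();
        conn.setRequestMethod("GET");
        try {
            // 204 No Content (every reply in this log) means the coordinator does
            // not serve metadata for the segment yet; 200 with a body means loaded.
            return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
        } finally {
            conn.disconnect();
        }
    }

    public static void main(String[] args) throws Exception {
        String url = args[0];                                   // one coordinator segment URL
        long deadlineMs = System.currentTimeMillis() + 60_000;  // hypothetical 60 s budget
        while (System.currentTimeMillis() < deadlineMs) {
            if (isLoaded(url)) {
                System.out.println("loaded: " + url);
                return;
            }
            Thread.sleep(1_000);                                // back off before re-checking
        }
        System.out.println("timed out: " + url);
    }
}
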
2018-07-21T05:34:19,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting
2018-07-21T05:34:19,185 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,185 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content
2018-07-21T05:34:19,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is []
2018-07-21T05:34:19,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting
2018-07-21T05:34:19,186 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,186 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content
2018-07-21T05:34:19,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is []
2018-07-21T05:34:19,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting
2018-07-21T05:34:19,186 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,186 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content
2018-07-21T05:34:19,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is []
2018-07-21T05:34:19,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting
2018-07-21T05:34:19,187 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,187 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content
2018-07-21T05:34:19,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is []
2018-07-21T05:34:19,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] starting
2018-07-21T05:34:19,188 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,188 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] Got response: 204 No Content
2018-07-21T05:34:19,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] response is []
2018-07-21T05:34:19,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] starting
2018-07-21T05:34:19,190 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,190 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] Got response: 204 No Content
2018-07-21T05:34:19,190 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] response is []
2018-07-21T05:34:19,190 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] starting
2018-07-21T05:34:19,191 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,191 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] Got response: 204 No Content
2018-07-21T05:34:19,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] response is []
2018-07-21T05:34:19,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] starting
2018-07-21T05:34:19,192 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,192 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] Got response: 204 No Content
2018-07-21T05:34:19,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] response is []
2018-07-21T05:34:19,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] starting
2018-07-21T05:34:19,192 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,192 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] Got response: 204 No Content
2018-07-21T05:34:19,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] response is []
2018-07-21T05:34:19,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] starting
2018-07-21T05:34:19,193 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,193 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] Got response: 204 No Content
2018-07-21T05:34:19,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] response is []
2018-07-21T05:34:19,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] starting
2018-07-21T05:34:19,194 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,194 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] Got response: 204 No Content
2018-07-21T05:34:19,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] response is []
2018-07-21T05:34:19,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] starting
2018-07-21T05:34:19,194 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,194 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] Got response: 204 No Content 2018-07-21T05:34:19,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] response is [] 2018-07-21T05:34:19,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] starting 2018-07-21T05:34:19,195 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,195 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] Got response: 204 No Content 2018-07-21T05:34:19,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] response is [] 2018-07-21T05:34:19,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] starting 2018-07-21T05:34:19,196 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,196 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] Got response: 204 No Content 2018-07-21T05:34:19,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] response 
is [] 2018-07-21T05:34:19,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] starting 2018-07-21T05:34:19,196 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,196 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] Got response: 204 No Content 2018-07-21T05:34:19,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] response is [] 2018-07-21T05:34:19,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting 2018-07-21T05:34:19,198 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,198 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content 2018-07-21T05:34:19,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is [] 2018-07-21T05:34:19,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting 2018-07-21T05:34:19,199 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,199 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content 2018-07-21T05:34:19,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is [] 2018-07-21T05:34:19,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] starting 2018-07-21T05:34:19,200 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,200 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] Got response: 204 No Content 2018-07-21T05:34:19,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] response is [] 2018-07-21T05:34:19,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] starting 2018-07-21T05:34:19,201 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,201 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] Got response: 204 No Content 2018-07-21T05:34:19,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] response is [] 2018-07-21T05:34:19,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting 2018-07-21T05:34:19,202 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,202 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content 2018-07-21T05:34:19,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is [] 2018-07-21T05:34:19,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:34:19,203 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,203 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:34:19,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 
2018-07-21T05:34:19,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] starting 2018-07-21T05:34:19,203 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,203 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] Got response: 204 No Content 2018-07-21T05:34:19,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] response is [] 2018-07-21T05:34:19,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] starting 2018-07-21T05:34:19,205 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,205 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] Got response: 204 No Content 2018-07-21T05:34:19,205 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] response is [] 2018-07-21T05:34:19,205 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting 2018-07-21T05:34:19,206 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,206 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content 2018-07-21T05:34:19,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is [] 2018-07-21T05:34:19,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:34:19,209 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,209 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:34:19,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 2018-07-21T05:34:19,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] starting 2018-07-21T05:34:19,209 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,209 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] Got response: 204 No Content 2018-07-21T05:34:19,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] response is [] 2018-07-21T05:34:19,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] starting 2018-07-21T05:34:19,210 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,210 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] Got response: 204 No Content 2018-07-21T05:34:19,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] response is [] 2018-07-21T05:34:19,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting 2018-07-21T05:34:19,211 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,211 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content 2018-07-21T05:34:19,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is [] 
2018-07-21T05:34:19,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:34:19,211 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,211 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:34:19,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 2018-07-21T05:34:19,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] starting 2018-07-21T05:34:19,212 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,212 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] Got response: 204 No Content 2018-07-21T05:34:19,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] response is [] 2018-07-21T05:34:19,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] starting 2018-07-21T05:34:19,213 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,213 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] Got response: 204 No Content 2018-07-21T05:34:19,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] response is [] 2018-07-21T05:34:19,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting 2018-07-21T05:34:19,214 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,214 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content 2018-07-21T05:34:19,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is [] 2018-07-21T05:34:19,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting 2018-07-21T05:34:19,215 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,215 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content 2018-07-21T05:34:19,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is [] 2018-07-21T05:34:19,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] starting 2018-07-21T05:34:19,215 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,215 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] Got response: 204 No Content 2018-07-21T05:34:19,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] response is [] 2018-07-21T05:34:19,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting 2018-07-21T05:34:19,218 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,218 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content 2018-07-21T05:34:19,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is [] 
2018-07-21T05:34:19,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting 2018-07-21T05:34:19,219 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,220 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content 2018-07-21T05:34:19,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is [] 2018-07-21T05:34:19,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting 2018-07-21T05:34:19,220 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,220 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content 2018-07-21T05:34:19,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is [] 2018-07-21T05:34:19,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:34:19,221 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,221 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:34:19,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 2018-07-21T05:34:19,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting 2018-07-21T05:34:19,222 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,222 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content 2018-07-21T05:34:19,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is [] 2018-07-21T05:34:19,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting 2018-07-21T05:34:19,223 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,223 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content 2018-07-21T05:34:19,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is [] 2018-07-21T05:34:19,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:34:19,224 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,224 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:34:19,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 2018-07-21T05:34:19,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] starting 2018-07-21T05:34:19,225 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,225 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] Got response: 204 No Content 2018-07-21T05:34:19,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] response is 
[]
2018-07-21T05:34:19,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] starting
2018-07-21T05:34:19,225 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,225 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] Got response: 204 No Content
2018-07-21T05:34:19,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] response is []
[... the same four-entry DEBUG cycle (GET starting, 204 No Content messageReceived, Got response: 204 No Content, DruidStorageHandler "response is []") repeats between 05:34:19,225 and 05:34:19,240 for shards _282, _160, _281, _163, _284, _162, _283, _165, _286, _164, _285, _167, _288, and _166 of the 1970-01-01T00:00Z/1970-01-01T01:00Z interval and shards _45, _44, _47, and _46 of the 1969-12-31T23:00Z/1970-01-01T00:00Z interval of default.druid_max_size_partition ...]
2018-07-21T05:34:19,239 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
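[Note: judging from the entries above, DruidStorageHandler is polling the coordinator endpoint /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} after ingestion, and a 204 No Content with an empty body ("response is []") appears to mean the coordinator does not yet report that segment, so the handler keeps checking. A minimal sketch of the same check follows; this is not Hive's actual implementation (which drives druid's NettyHttpClient, as logged), it uses only java.net.HttpURLConnection, and the class name, method name, and timeout values are hypothetical.]

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    /** Hypothetical sketch: check whether the Druid coordinator reports a segment yet. */
    public final class CoordinatorSegmentCheck {

        // Endpoint shape taken from the GET URLs in the log above.
        private static final String SEGMENT_URL =
            "http://localhost:8081/druid/coordinator/v1/datasources/%s/segments/%s";

        /**
         * Returns true once GET /segments/{id} answers 200 with a non-empty body.
         * A 204 No Content (what every request in this log receives) is taken to
         * mean the coordinator does not know the segment yet, so the caller retries.
         */
        static boolean isSegmentVisible(String dataSource, String segmentId) throws IOException {
            URL url = new URL(String.format(SEGMENT_URL, dataSource, segmentId));
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            conn.setConnectTimeout(5_000);   // hypothetical timeouts
            conn.setReadTimeout(5_000);
            try {
                int code = conn.getResponseCode();
                if (code == 204) {
                    return false;            // segment not registered yet -> retry later
                }
                if (code != 200) {
                    throw new IOException("Unexpected coordinator response: " + code);
                }
                try (InputStream in = conn.getInputStream()) {
                    return in.readAllBytes().length > 0;  // non-empty metadata = segment known
                }
            } finally {
                conn.disconnect();
            }
        }

        public static void main(String[] args) throws Exception {
            // Segment id copied from the log; poll a few times as the handler does.
            String segment = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z"
                + "_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161";
            for (int i = 0; i < 10 && !isSegmentVisible("default.druid_max_size_partition", segment); i++) {
                Thread.sleep(1_000);
            }
        }
    }

[The per-shard fan-out across HttpClient-Netty-Worker-0..15 threads in the log is the async equivalent of calling a check like this once per segment; the sketch keeps it sequential for clarity.]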
[... identical DEBUG cycles continue between 05:34:19,240 and 05:34:19,268 for shards _287, _169, _168, _289, _390, _150, _271, _392, _270, _391, _152, _273, _394, _151, _272, _393, _154, _275, _396, and _153 of the 1970-01-01T00:00Z/1970-01-01T01:00Z interval and shards _49, _48, _50, _52, _51, _54, _53, _56, and _55 of the 1969-12-31T23:00Z/1970-01-01T00:00Z interval; every GET returns 204 No Content and DruidStorageHandler logs "response is []" ...]
2018-07-21T05:34:19,268 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] starting
2018-07-21T05:34:19,269 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,269 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] Got response: 204 No Content
2018-07-21T05:34:19,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] response is
[] 2018-07-21T05:34:19,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] starting 2018-07-21T05:34:19,269 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,269 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] Got response: 204 No Content 2018-07-21T05:34:19,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] response is [] 2018-07-21T05:34:19,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:34:19,270 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,270 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:34:19,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:34:19,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] starting 2018-07-21T05:34:19,271 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,271 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] Got response: 204 No Content 2018-07-21T05:34:19,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] response is [] 2018-07-21T05:34:19,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] starting 2018-07-21T05:34:19,272 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,272 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] Got response: 204 No Content 2018-07-21T05:34:19,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] response is [] 2018-07-21T05:34:19,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] starting 2018-07-21T05:34:19,275 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,275 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] Got response: 204 No Content 2018-07-21T05:34:19,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] response is [] 2018-07-21T05:34:19,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:34:19,276 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,276 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:34:19,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 2018-07-21T05:34:19,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] starting 2018-07-21T05:34:19,276 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,277 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] Got response: 204 No Content 2018-07-21T05:34:19,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] response 
is [] 2018-07-21T05:34:19,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] starting 2018-07-21T05:34:19,277 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,277 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] Got response: 204 No Content 2018-07-21T05:34:19,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] response is [] 2018-07-21T05:34:19,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] starting 2018-07-21T05:34:19,278 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,278 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] Got response: 204 No Content 2018-07-21T05:34:19,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] response is [] 2018-07-21T05:34:19,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] starting 2018-07-21T05:34:19,278 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,278 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] Got response: 204 No Content 2018-07-21T05:34:19,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] response is [] 2018-07-21T05:34:19,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] starting 2018-07-21T05:34:19,279 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,279 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] Got response: 204 No Content 2018-07-21T05:34:19,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] response is [] 2018-07-21T05:34:19,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:34:19,280 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,280 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:34:19,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 2018-07-21T05:34:19,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] starting 2018-07-21T05:34:19,280 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,280 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] Got response: 204 No Content 2018-07-21T05:34:19,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] response is [] 2018-07-21T05:34:19,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] starting 2018-07-21T05:34:19,281 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,281 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] Got response: 204 No Content 2018-07-21T05:34:19,283 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] response is 
[] 2018-07-21T05:34:19,283 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] starting 2018-07-21T05:34:19,284 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,284 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] Got response: 204 No Content 2018-07-21T05:34:19,284 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] response is [] 2018-07-21T05:34:19,284 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] starting 2018-07-21T05:34:19,285 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,285 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] Got response: 204 No Content 2018-07-21T05:34:19,285 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] response is [] 2018-07-21T05:34:19,285 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] starting 2018-07-21T05:34:19,286 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,286 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] Got response: 204 No Content 2018-07-21T05:34:19,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] response is [] 2018-07-21T05:34:19,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] starting 2018-07-21T05:34:19,286 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,286 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] Got response: 204 No Content 2018-07-21T05:34:19,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] response is [] 2018-07-21T05:34:19,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] starting 2018-07-21T05:34:19,287 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,287 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] Got response: 204 No Content 2018-07-21T05:34:19,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] response is [] 2018-07-21T05:34:19,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] starting 2018-07-21T05:34:19,287 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,287 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] Got response: 204 No Content 2018-07-21T05:34:19,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] response is [] 2018-07-21T05:34:19,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] starting 2018-07-21T05:34:19,288 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,288 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] Got response: 204 No Content 2018-07-21T05:34:19,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] response 
is [] 2018-07-21T05:34:19,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] starting 2018-07-21T05:34:19,289 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,289 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] Got response: 204 No Content 2018-07-21T05:34:19,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] response is [] 2018-07-21T05:34:19,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] starting 2018-07-21T05:34:19,289 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,289 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] Got response: 204 No Content 2018-07-21T05:34:19,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] response is [] 2018-07-21T05:34:19,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] starting 2018-07-21T05:34:19,290 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,290 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] Got response: 204 No Content 2018-07-21T05:34:19,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] response is [] 2018-07-21T05:34:19,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] starting 2018-07-21T05:34:19,291 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,291 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] Got response: 204 No Content 2018-07-21T05:34:19,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] response is [] 2018-07-21T05:34:19,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting 2018-07-21T05:34:19,292 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,292 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content 2018-07-21T05:34:19,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is [] 2018-07-21T05:34:19,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] starting 2018-07-21T05:34:19,292 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,292 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] Got response: 204 No Content 2018-07-21T05:34:19,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] response is [] 2018-07-21T05:34:19,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] starting 2018-07-21T05:34:19,293 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,293 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] Got response: 204 No Content 2018-07-21T05:34:19,293 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] response is [] 
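
The repeated DEBUG quadruplets above all follow one pattern: for each freshly pushed segment, DruidStorageHandler issues a GET against the coordinator's segment metadata endpoint (http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/<segmentId>), and a 204 No Content with an empty body means the coordinator has not registered that segment yet, so the check is repeated. Below is a minimal, self-contained sketch of that polling pattern only, not Hive's actual implementation: the endpoint shape, coordinator address, datasource, segment id, and the 204/empty-body handling are taken from the log entries above, while waitForSegment is a hypothetical helper and the retry count and sleep interval are arbitrary assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SegmentLoadCheck {

    // Hypothetical helper illustrating the polling seen in the log above:
    // GET the coordinator's segment endpoint until it answers 200 with a
    // non-empty body, i.e. until the coordinator knows about the segment.
    static boolean waitForSegment(HttpClient client, String coordinator,
                                  String dataSource, String segmentId,
                                  int maxAttempts, long sleepMillis) throws Exception {
        String url = coordinator + "/druid/coordinator/v1/datasources/"
                + dataSource + "/segments/" + segmentId;
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());
            if (response.statusCode() == 200 && !response.body().isEmpty()) {
                return true; // coordinator has registered the segment
            }
            // 204 No Content / empty body, as logged above: not loaded yet.
            Thread.sleep(sleepMillis);
        }
        return false;
    }

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Coordinator address, datasource, and segment id copied from the log;
        // 60 attempts at 1s intervals are assumed values for illustration.
        boolean loaded = waitForSegment(client, "http://localhost:8081",
                "default.druid_max_size_partition",
                "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
                        + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151",
                60, 1000L);
        System.out.println(loaded ? "segment loaded" : "segment not loaded yet");
    }
}

Against a coordinator in the state captured here, every attempt would see 204 No Content, matching the empty "response is []" results logged above; once the coordinator registers the segment, the endpoint returns 200 with a JSON body and the loop exits.
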
2018-07-21T05:34:19,293 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] starting 2018-07-21T05:34:19,293 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,293 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] Got response: 204 No Content 2018-07-21T05:34:19,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] response is [] 2018-07-21T05:34:19,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] starting 2018-07-21T05:34:19,294 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,294 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] Got response: 204 No Content 2018-07-21T05:34:19,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] response is [] 2018-07-21T05:34:19,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] starting 2018-07-21T05:34:19,295 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,295 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] Got response: 204 No Content 2018-07-21T05:34:19,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] response is [] 2018-07-21T05:34:19,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] starting 2018-07-21T05:34:19,296 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,296 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] Got response: 204 No Content 2018-07-21T05:34:19,296 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] response is [] 2018-07-21T05:34:19,296 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] starting 2018-07-21T05:34:19,296 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,296 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] Got response: 204 No Content 2018-07-21T05:34:19,296 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] response is [] 2018-07-21T05:34:19,296 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] starting 2018-07-21T05:34:19,297 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,297 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] Got response: 204 No Content 2018-07-21T05:34:19,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] response is [] 2018-07-21T05:34:19,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] starting 2018-07-21T05:34:19,297 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,298 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] Got response: 204 No Content 2018-07-21T05:34:19,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] response 
is [] 2018-07-21T05:34:19,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] starting 2018-07-21T05:34:19,298 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,298 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] Got response: 204 No Content 2018-07-21T05:34:19,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] response is [] 2018-07-21T05:34:19,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] starting 2018-07-21T05:34:19,299 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,299 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] Got response: 204 No Content 2018-07-21T05:34:19,299 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] response is [] 2018-07-21T05:34:19,299 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] starting 2018-07-21T05:34:19,300 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,300 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] Got response: 204 No Content 2018-07-21T05:34:19,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] response is [] 2018-07-21T05:34:19,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] starting 2018-07-21T05:34:19,300 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,300 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] Got response: 204 No Content 2018-07-21T05:34:19,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] response is [] 2018-07-21T05:34:19,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] starting 2018-07-21T05:34:19,301 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,301 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] Got response: 204 No Content 2018-07-21T05:34:19,301 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:34:19,301 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] response is [] 2018-07-21T05:34:19,301 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] starting 2018-07-21T05:34:19,302 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,302 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] Got response: 204 No Content 2018-07-21T05:34:19,302 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] response is [] 2018-07-21T05:34:19,302 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] starting 2018-07-21T05:34:19,303 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,303 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] Got response: 204 No Content 2018-07-21T05:34:19,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment 
[http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] response is [] 2018-07-21T05:34:19,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] starting 2018-07-21T05:34:19,303 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,303 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] Got response: 204 No Content 2018-07-21T05:34:19,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] response is [] 2018-07-21T05:34:19,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:34:19,304 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,304 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:34:19,304 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:34:19,304 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting 2018-07-21T05:34:19,305 DEBUG 
[HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,305 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content 2018-07-21T05:34:19,305 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is [] 2018-07-21T05:34:19,305 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:34:19,306 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,306 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:34:19,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 2018-07-21T05:34:19,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] starting 2018-07-21T05:34:19,306 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,306 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] Got response: 204 No Content 2018-07-21T05:34:19,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] response is [] 2018-07-21T05:34:19,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] starting 2018-07-21T05:34:19,307 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,307 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] Got response: 204 No Content 2018-07-21T05:34:19,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] response is [] 2018-07-21T05:34:19,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] starting 2018-07-21T05:34:19,307 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,308 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] Got response: 204 No Content 2018-07-21T05:34:19,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] 
response is [] 2018-07-21T05:34:19,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting 2018-07-21T05:34:19,308 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,308 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content 2018-07-21T05:34:19,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response is [] 2018-07-21T05:34:19,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:34:19,309 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,309 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:34:19,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:34:19,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting 2018-07-21T05:34:19,310 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,310 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content 2018-07-21T05:34:19,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is [] 2018-07-21T05:34:19,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:34:19,311 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,311 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:34:19,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:34:19,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting 2018-07-21T05:34:19,311 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,311 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content 2018-07-21T05:34:19,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is [] 2018-07-21T05:34:19,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting 2018-07-21T05:34:19,312 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,312 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content 2018-07-21T05:34:19,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is [] 2018-07-21T05:34:19,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:34:19,313 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,313 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:34:19,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 
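The burst of DEBUG entries above is Hive's DruidStorageHandler waiting for the segments it just pushed to become queryable: for each segment identifier it issues a GET against the coordinator's /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} endpoint, and a 204 No Content with an empty body (logged as "response is []") means the coordinator does not yet announce that segment, so the check will be repeated. Below is a minimal sketch of one such check, assuming a coordinator on localhost:8081 as in this run; this is illustrative Java, not the actual DruidStorageHandler code.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class SegmentCheckSketch {

  // Returns true once the coordinator serves a descriptor for the segment;
  // a 204 No Content (the "response is []" case above) reads as "not yet".
  static boolean isSegmentLoaded(String coordinator, String dataSource,
                                 String segmentId) throws IOException {
    URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
        + dataSource + "/segments/" + segmentId);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    try {
      return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
    } finally {
      conn.disconnect();
    }
  }

  public static void main(String[] args) throws IOException {
    // Segment id copied from the log above; URL-reserved characters in the
    // id (the ':' in the interval timestamps) may need encoding in stricter
    // HTTP clients.
    String segment = "default.druid_max_size_partition_"
        + "1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_"
        + "2018-07-21T05:31:59.547-07:00_302";
    System.out.println(isSegmentLoaded("http://localhost:8081",
        "default.druid_max_size_partition", segment));
  }
}

Each check is cheap, which is why the main thread (ee745c13-...-main) can fan requests out one after another while the HttpClient-Netty-Worker threads merely hand the 204 responses back.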
2018-07-21T05:34:19,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting 2018-07-21T05:34:19,313 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,314 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:34:19,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 2018-07-21T05:34:19,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting 2018-07-21T05:34:19,314 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,314 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content 2018-07-21T05:34:19,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is [] 2018-07-21T05:34:19,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting 2018-07-21T05:34:19,315 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,315 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content 2018-07-21T05:34:19,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is [] 2018-07-21T05:34:19,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting 2018-07-21T05:34:19,315 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,315 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content 2018-07-21T05:34:19,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is [] 2018-07-21T05:34:19,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting 2018-07-21T05:34:19,316 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,316 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content 2018-07-21T05:34:19,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is [] 2018-07-21T05:34:19,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting 2018-07-21T05:34:19,316 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,316 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content 2018-07-21T05:34:19,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is [] 2018-07-21T05:34:19,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting 2018-07-21T05:34:19,317 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,317 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content 2018-07-21T05:34:19,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is [] 
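Taken together, the entries form an outer wait loop: the same 05:34:19 second covers dozens of segment ids (suffixes _7 through _459 appear in this stretch), and the handler keeps cycling through whichever ids are still unannounced until every one resolves or, presumably, some retry budget is exhausted. A sketch of that loop under the same assumptions follows, with a stub Predicate standing in for the HTTP check from the previous sketch; the timings and names are illustrative, not Hive's actual configuration.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

public class WaitForSegmentsSketch {

  // Re-check every still-pending segment id each pass, sleeping between
  // passes, until all are visible or the deadline expires.
  static boolean waitForSegments(List<String> segmentIds,
                                 Predicate<String> isLoaded,
                                 long timeoutMs, long pollIntervalMs)
      throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    List<String> pending = new ArrayList<>(segmentIds);
    while (!pending.isEmpty() && System.currentTimeMillis() < deadline) {
      pending.removeIf(isLoaded);       // one burst like the one logged above
      if (!pending.isEmpty()) {
        Thread.sleep(pollIntervalMs);   // passive wait before the next pass
      }
    }
    return pending.isEmpty();
  }

  public static void main(String[] args) throws InterruptedException {
    long start = System.currentTimeMillis();
    // Stub checker: pretend segments become visible after ~50 ms.
    Predicate<String> stub = id -> System.currentTimeMillis() - start > 50;
    boolean ok = waitForSegments(List.of("seg_302", "seg_423", "seg_404"),
        stub, 1_000, 25);
    System.out.println(ok ? "all segments loaded" : "timed out waiting");
  }
}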
2018-07-21T05:34:19,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] starting 2018-07-21T05:34:19,318 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,318 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] Got response: 204 No Content 2018-07-21T05:34:19,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] response is [] 2018-07-21T05:34:19,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] starting 2018-07-21T05:34:19,318 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,318 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] Got response: 204 No Content 2018-07-21T05:34:19,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] response is [] 2018-07-21T05:34:19,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] starting 2018-07-21T05:34:19,319 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,319 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] Got response: 204 No Content 2018-07-21T05:34:19,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] response is [] 2018-07-21T05:34:19,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting 2018-07-21T05:34:19,320 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,320 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content 2018-07-21T05:34:19,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is [] 2018-07-21T05:34:19,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] starting 2018-07-21T05:34:19,321 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,321 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] Got response: 204 No Content 2018-07-21T05:34:19,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] response is [] 2018-07-21T05:34:19,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] starting 2018-07-21T05:34:19,321 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,321 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] Got response: 204 No Content 2018-07-21T05:34:19,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] response is [] 2018-07-21T05:34:19,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] starting 2018-07-21T05:34:19,322 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,322 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] Got response: 204 No Content 2018-07-21T05:34:19,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] 
response is []
2018-07-21T05:34:19,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] starting
2018-07-21T05:34:19,323 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,323 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] Got response: 204 No Content
2018-07-21T05:34:19,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] response is []
2018-07-21T05:34:19,323 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] starting
2018-07-21T05:34:19,327 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,327 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] Got response: 204 No Content
2018-07-21T05:34:19,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] response is []
2018-07-21T05:34:19,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] starting
2018-07-21T05:34:19,328 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,328 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] Got response: 204 No Content
2018-07-21T05:34:19,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] response is []
2018-07-21T05:34:19,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting
2018-07-21T05:34:19,329 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,329 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content
2018-07-21T05:34:19,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is []
2018-07-21T05:34:19,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting
2018-07-21T05:34:19,330 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,330 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content
2018-07-21T05:34:19,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is []
2018-07-21T05:34:19,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting
2018-07-21T05:34:19,330 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,330 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content
2018-07-21T05:34:19,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response is []
2018-07-21T05:34:19,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting
2018-07-21T05:34:19,331 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,331 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content
2018-07-21T05:34:19,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is []
2018-07-21T05:34:19,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting
2018-07-21T05:34:19,332 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,332 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content
2018-07-21T05:34:19,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] response is []
2018-07-21T05:34:19,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting
2018-07-21T05:34:19,332 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,332 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content
2018-07-21T05:34:19,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is []
2018-07-21T05:34:19,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting
2018-07-21T05:34:19,333 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,333 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content
2018-07-21T05:34:19,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is []
2018-07-21T05:34:19,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting
2018-07-21T05:34:19,334 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,334 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content
2018-07-21T05:34:19,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is []
2018-07-21T05:34:19,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] starting
2018-07-21T05:34:19,335 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,335 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] Got response: 204 No Content
2018-07-21T05:34:19,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] response is []
2018-07-21T05:34:19,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] starting
2018-07-21T05:34:19,336 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,336 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] Got response: 204 No Content
2018-07-21T05:34:19,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] response is []
2018-07-21T05:34:19,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] starting
2018-07-21T05:34:19,336 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,336 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] Got response: 204 No Content
2018-07-21T05:34:19,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] response is []
2018-07-21T05:34:19,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] starting
2018-07-21T05:34:19,337 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,337 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] Got response: 204 No Content
2018-07-21T05:34:19,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] response is []
2018-07-21T05:34:19,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] starting
2018-07-21T05:34:19,338 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,338 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] Got response: 204 No Content
2018-07-21T05:34:19,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] response is []
2018-07-21T05:34:19,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] starting
2018-07-21T05:34:19,339 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,339 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] Got response: 204 No Content
2018-07-21T05:34:19,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] response is []
2018-07-21T05:34:19,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting
2018-07-21T05:34:19,340 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,340 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content
2018-07-21T05:34:19,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is []
2018-07-21T05:34:19,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] starting
2018-07-21T05:34:19,341 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,341 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] Got response: 204 No Content
2018-07-21T05:34:19,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] response is []
2018-07-21T05:34:19,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] starting
2018-07-21T05:34:19,341 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,341 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] Got response: 204 No Content
2018-07-21T05:34:19,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] response is []
2018-07-21T05:34:19,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] starting
2018-07-21T05:34:19,342 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,342 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] Got response: 204 No Content
2018-07-21T05:34:19,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] response is []
2018-07-21T05:34:19,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting
2018-07-21T05:34:19,343 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,343 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content
2018-07-21T05:34:19,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is []
2018-07-21T05:34:19,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] starting
2018-07-21T05:34:19,344 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,344 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] Got response: 204 No Content
2018-07-21T05:34:19,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] response is []
2018-07-21T05:34:19,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] starting
2018-07-21T05:34:19,345 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,345 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] Got response: 204 No Content
2018-07-21T05:34:19,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] response is []
2018-07-21T05:34:19,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] starting
2018-07-21T05:34:19,345 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,345 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] Got response: 204 No Content
2018-07-21T05:34:19,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] response is []
2018-07-21T05:34:19,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting
2018-07-21T05:34:19,346 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,346 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content
2018-07-21T05:34:19,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is []
2018-07-21T05:34:19,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] starting
2018-07-21T05:34:19,346 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,347 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] Got response: 204 No Content
2018-07-21T05:34:19,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] response is []
2018-07-21T05:34:19,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] starting
2018-07-21T05:34:19,347 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,347 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] Got response: 204 No Content
2018-07-21T05:34:19,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] response is []
2018-07-21T05:34:19,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] starting
2018-07-21T05:34:19,348 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,348 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] Got response: 204 No Content
2018-07-21T05:34:19,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] response is []
2018-07-21T05:34:19,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting
2018-07-21T05:34:19,349 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,349 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content
2018-07-21T05:34:19,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is []
2018-07-21T05:34:19,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] starting
2018-07-21T05:34:19,349 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,349 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] Got response: 204 No Content
2018-07-21T05:34:19,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] response is []
2018-07-21T05:34:19,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] starting
2018-07-21T05:34:19,350 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,350 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] Got response: 204 No Content
2018-07-21T05:34:19,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] response is []
2018-07-21T05:34:19,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] starting
2018-07-21T05:34:19,351 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,351 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] Got response: 204 No Content
2018-07-21T05:34:19,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] response is []
2018-07-21T05:34:19,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting
2018-07-21T05:34:19,351 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,351 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content
2018-07-21T05:34:19,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is []
2018-07-21T05:34:19,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] starting
2018-07-21T05:34:19,352 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,352 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] Got response: 204 No Content
2018-07-21T05:34:19,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] response is []
2018-07-21T05:34:19,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] starting
2018-07-21T05:34:19,353 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,353 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] Got response: 204 No Content
2018-07-21T05:34:19,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] response is []
2018-07-21T05:34:19,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] starting
2018-07-21T05:34:19,354 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,354 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] Got response: 204 No Content
2018-07-21T05:34:19,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] response is []
2018-07-21T05:34:19,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting
2018-07-21T05:34:19,354 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,354 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content
2018-07-21T05:34:19,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is []
2018-07-21T05:34:19,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] starting
2018-07-21T05:34:19,355 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,355 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] Got response: 204 No Content
2018-07-21T05:34:19,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] response is []
2018-07-21T05:34:19,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] starting
2018-07-21T05:34:19,356 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,356 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] Got response: 204 No Content
2018-07-21T05:34:19,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] response is []
2018-07-21T05:34:19,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] starting
2018-07-21T05:34:19,356 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,356 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] Got response: 204 No Content
2018-07-21T05:34:19,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] response is []
2018-07-21T05:34:19,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] starting
2018-07-21T05:34:19,357 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,357 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] Got response: 204 No Content
2018-07-21T05:34:19,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] response is []
2018-07-21T05:34:19,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] starting
2018-07-21T05:34:19,358 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,358 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] Got response: 204 No Content
2018-07-21T05:34:19,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] response is []
2018-07-21T05:34:19,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] starting
2018-07-21T05:34:19,358 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,358 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] Got response: 204 No Content
2018-07-21T05:34:19,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] response is []
2018-07-21T05:34:19,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting
2018-07-21T05:34:19,359 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,359 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content
2018-07-21T05:34:19,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is []
2018-07-21T05:34:19,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] starting
2018-07-21T05:34:19,360 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,360 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] Got response: 204 No Content
2018-07-21T05:34:19,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] response is []
2018-07-21T05:34:19,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] starting
2018-07-21T05:34:19,360 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,360 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] Got response: 204 No Content
2018-07-21T05:34:19,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] response is []
2018-07-21T05:34:19,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] starting
2018-07-21T05:34:19,361 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,361 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] Got response: 204 No Content
2018-07-21T05:34:19,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] response is []
2018-07-21T05:34:19,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] starting
2018-07-21T05:34:19,362 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,362 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] Got response: 204 No Content
2018-07-21T05:34:19,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] response
is [] 2018-07-21T05:34:19,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] starting 2018-07-21T05:34:19,363 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,363 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] Got response: 204 No Content 2018-07-21T05:34:19,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] response is [] 2018-07-21T05:34:19,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] starting 2018-07-21T05:34:19,363 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,363 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] Got response: 204 No Content 2018-07-21T05:34:19,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] response is [] 2018-07-21T05:34:19,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] starting 2018-07-21T05:34:19,364 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,364 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] Got response: 204 No Content 2018-07-21T05:34:19,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] response is [] 2018-07-21T05:34:19,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] starting 2018-07-21T05:34:19,365 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,365 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] Got response: 204 No Content 2018-07-21T05:34:19,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] response is [] 2018-07-21T05:34:19,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting 2018-07-21T05:34:19,366 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,366 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content 2018-07-21T05:34:19,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is [] 2018-07-21T05:34:19,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting 2018-07-21T05:34:19,366 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,366 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content 2018-07-21T05:34:19,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is [] 2018-07-21T05:34:19,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] starting 2018-07-21T05:34:19,369 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,369 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] Got response: 204 No Content 2018-07-21T05:34:19,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] response 
is [] 2018-07-21T05:34:19,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting 2018-07-21T05:34:19,372 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,372 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content 2018-07-21T05:34:19,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is [] 2018-07-21T05:34:19,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting 2018-07-21T05:34:19,373 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,373 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content 2018-07-21T05:34:19,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is [] 2018-07-21T05:34:19,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting 2018-07-21T05:34:19,374 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,375 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content 2018-07-21T05:34:19,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is [] 2018-07-21T05:34:19,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:34:19,375 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,375 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:34:19,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is [] 2018-07-21T05:34:19,375 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting 2018-07-21T05:34:19,376 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,376 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content 2018-07-21T05:34:19,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is [] 2018-07-21T05:34:19,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting 2018-07-21T05:34:19,377 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,377 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content 2018-07-21T05:34:19,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is [] 2018-07-21T05:34:19,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:34:19,378 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,378 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:34:19,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response 
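
[Annotation] The cycle repeated throughout this block is Hive's post-insert availability check: for each segment handed off to Druid, DruidStorageHandler issues a GET against the coordinator's segment-metadata endpoint (/druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}) and treats the 204 No Content / empty-body reply seen in every response here as "segment not served yet", retrying until the coordinator reports it. Below is a minimal sketch of one such probe using only the JDK's HttpURLConnection; the class and method names are illustrative assumptions, not Hive's actual code.

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Illustrative sketch only -- not Hive's implementation.
    public class SegmentAvailabilityCheck {

        // One probe, mirroring the GETs logged above: 204 (no body) means the
        // coordinator does not serve the segment yet; 200 means it does.
        static boolean isSegmentServed(String coordinatorBase, String dataSource,
                                       String segmentId) throws IOException {
            URL url = new URL(coordinatorBase + "/druid/coordinator/v1/datasources/"
                    + dataSource + "/segments/" + segmentId);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            try {
                conn.setRequestMethod("GET");
                return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
            } finally {
                conn.disconnect();
            }
        }

        public static void main(String[] args) throws Exception {
            // Segment id taken verbatim from the log; real code would URL-encode it.
            String segment = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z"
                    + "_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346";
            for (int attempt = 0; attempt < 60; attempt++) { // bounded retry loop
                if (isSegmentServed("http://localhost:8081",
                        "default.druid_max_size_partition", segment)) {
                    System.out.println("segment served");
                    return;
                }
                Thread.sleep(1000);
            }
            System.out.println("gave up waiting for segment");
        }
    }

Probing one URL per segment costs several hundred round trips in this run; the coordinator also exposes a datasource-level listing (GET /druid/coordinator/v1/datasources/{dataSource}/segments) that returns all served segment ids in one call, which is one way such a check can be batched.
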
2018-07-21T05:34:19,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] starting
2018-07-21T05:34:19,378 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,378 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] Got response: 204 No Content
2018-07-21T05:34:19,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] response is []
2018-07-21T05:34:19,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] starting
2018-07-21T05:34:19,379 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,379 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] Got response: 204 No Content
2018-07-21T05:34:19,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] response is []
2018-07-21T05:34:19,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] starting
2018-07-21T05:34:19,380 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,380 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] Got response: 204 No Content
2018-07-21T05:34:19,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] response is []
2018-07-21T05:34:19,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] starting
2018-07-21T05:34:19,381 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,381 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] Got response: 204 No Content
2018-07-21T05:34:19,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] response is []
2018-07-21T05:34:19,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] starting
2018-07-21T05:34:19,381 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,381 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] Got response: 204 No Content
2018-07-21T05:34:19,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] response is []
2018-07-21T05:34:19,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] starting
2018-07-21T05:34:19,382 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,382 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] Got response: 204 No Content
2018-07-21T05:34:19,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] response is []
2018-07-21T05:34:19,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] starting
2018-07-21T05:34:19,383 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,383 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] Got response: 204 No Content
2018-07-21T05:34:19,383 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] response is []
2018-07-21T05:34:19,383 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] starting
2018-07-21T05:34:19,384 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,384 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] Got response: 204 No Content
2018-07-21T05:34:19,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] response is []
2018-07-21T05:34:19,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] starting
2018-07-21T05:34:19,384 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,384 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] Got response: 204 No Content
2018-07-21T05:34:19,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] response is []
2018-07-21T05:34:19,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] starting
2018-07-21T05:34:19,385 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,385 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] Got response: 204 No Content
2018-07-21T05:34:19,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] response is []
2018-07-21T05:34:19,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] starting
2018-07-21T05:34:19,386 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,386 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] Got response: 204 No Content
2018-07-21T05:34:19,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] response is []
2018-07-21T05:34:19,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] starting
2018-07-21T05:34:19,386 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,386 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] Got response: 204 No Content
2018-07-21T05:34:19,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] response is []
2018-07-21T05:34:19,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] starting
2018-07-21T05:34:19,387 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,387 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] Got response: 204 No Content
2018-07-21T05:34:19,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] response is []
2018-07-21T05:34:19,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] starting
2018-07-21T05:34:19,388 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,388 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] Got response: 204 No Content
2018-07-21T05:34:19,388 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] response is []
2018-07-21T05:34:19,388 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] starting
2018-07-21T05:34:19,389 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,389 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] Got response: 204 No Content
2018-07-21T05:34:19,389 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] response is []
2018-07-21T05:34:19,389 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] starting
2018-07-21T05:34:19,389 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,389 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] Got response: 204 No Content
2018-07-21T05:34:19,389 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] response is []
2018-07-21T05:34:19,389 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] starting
2018-07-21T05:34:19,390 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,390 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] Got response: 204 No Content
2018-07-21T05:34:19,390 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] response is []
2018-07-21T05:34:19,390 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] starting
2018-07-21T05:34:19,391 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,391 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] Got response: 204 No Content
2018-07-21T05:34:19,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] response is []
2018-07-21T05:34:19,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] starting
2018-07-21T05:34:19,391 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,391 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] Got response: 204 No Content
2018-07-21T05:34:19,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] response is []
2018-07-21T05:34:19,391 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] starting
2018-07-21T05:34:19,392 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,392 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] Got response: 204 No Content
2018-07-21T05:34:19,392 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] response is []
2018-07-21T05:34:19,392 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] starting
2018-07-21T05:34:19,393 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,393 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] Got response: 204 No Content
2018-07-21T05:34:19,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] response is []
2018-07-21T05:34:19,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] starting
2018-07-21T05:34:19,393 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,393 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] Got response: 204 No Content
2018-07-21T05:34:19,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] response is []
2018-07-21T05:34:19,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] starting
2018-07-21T05:34:19,394 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,394 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] Got response: 204 No Content
2018-07-21T05:34:19,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] response is []
2018-07-21T05:34:19,394 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] starting
2018-07-21T05:34:19,395 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,395 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] Got response: 204 No Content
2018-07-21T05:34:19,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] response is []
2018-07-21T05:34:19,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] starting
2018-07-21T05:34:19,395 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,395 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] Got response: 204 No Content
2018-07-21T05:34:19,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] response is []
2018-07-21T05:34:19,395 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] starting
2018-07-21T05:34:19,396 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:19,396 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] Got response: 204 No Content 2018-07-21T05:34:19,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] response is [] 2018-07-21T05:34:19,396 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] starting 2018-07-21T05:34:19,397 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,397 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] Got response: 204 No Content 2018-07-21T05:34:19,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] response is [] 2018-07-21T05:34:19,397 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] starting 2018-07-21T05:34:19,398 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,398 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] Got response: 204 No Content 2018-07-21T05:34:19,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] response 
is [] 2018-07-21T05:34:19,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] starting 2018-07-21T05:34:19,398 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,398 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] Got response: 204 No Content 2018-07-21T05:34:19,398 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] response is [] 2018-07-21T05:34:19,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting 2018-07-21T05:34:19,399 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,399 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content 2018-07-21T05:34:19,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is [] 2018-07-21T05:34:19,399 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting 2018-07-21T05:34:19,400 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,400 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content 2018-07-21T05:34:19,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is [] 2018-07-21T05:34:19,400 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] starting 2018-07-21T05:34:19,401 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,401 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] Got response: 204 No Content 2018-07-21T05:34:19,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] response is [] 2018-07-21T05:34:19,401 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting 2018-07-21T05:34:19,402 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,402 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content 2018-07-21T05:34:19,402 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is [] 2018-07-21T05:34:19,402 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting 2018-07-21T05:34:19,402 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,402 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content 2018-07-21T05:34:19,402 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is [] 2018-07-21T05:34:19,402 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting 2018-07-21T05:34:19,403 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,403 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content 2018-07-21T05:34:19,403 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] 
response is [] 2018-07-21T05:34:19,403 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:34:19,404 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,404 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:34:19,404 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 2018-07-21T05:34:19,404 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting 2018-07-21T05:34:19,404 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,404 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content 2018-07-21T05:34:19,404 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is [] 2018-07-21T05:34:19,404 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting 2018-07-21T05:34:19,405 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,405 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content 2018-07-21T05:34:19,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is [] 2018-07-21T05:34:19,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:34:19,406 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,406 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:34:19,406 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is [] 2018-07-21T05:34:19,406 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting 2018-07-21T05:34:19,406 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,406 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content 2018-07-21T05:34:19,406 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is [] 2018-07-21T05:34:19,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] starting 2018-07-21T05:34:19,407 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,407 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] Got response: 204 No Content 2018-07-21T05:34:19,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] response is [] 2018-07-21T05:34:19,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] starting 2018-07-21T05:34:19,408 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,408 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] Got response: 204 No Content 2018-07-21T05:34:19,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] response 
is [] 2018-07-21T05:34:19,408 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] starting 2018-07-21T05:34:19,408 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,409 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] Got response: 204 No Content 2018-07-21T05:34:19,409 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] response is [] 2018-07-21T05:34:19,409 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] starting 2018-07-21T05:34:19,410 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,410 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] Got response: 204 No Content 2018-07-21T05:34:19,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] response is [] 2018-07-21T05:34:19,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] starting 2018-07-21T05:34:19,411 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,411 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] Got response: 204 No Content 2018-07-21T05:34:19,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] response is [] 2018-07-21T05:34:19,411 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] starting 2018-07-21T05:34:19,411 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,411 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] Got response: 204 No Content 2018-07-21T05:34:19,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] response is [] 2018-07-21T05:34:19,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] starting 2018-07-21T05:34:19,412 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,412 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] Got response: 204 No Content 2018-07-21T05:34:19,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] response is [] 2018-07-21T05:34:19,412 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] starting 2018-07-21T05:34:19,413 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,413 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] Got response: 204 No Content 2018-07-21T05:34:19,413 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] response is [] 2018-07-21T05:34:19,413 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] starting 2018-07-21T05:34:19,414 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,414 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] Got response: 204 No Content 2018-07-21T05:34:19,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] response 
is [] 2018-07-21T05:34:19,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] starting 2018-07-21T05:34:19,414 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,414 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] Got response: 204 No Content 2018-07-21T05:34:19,414 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] response is [] 2018-07-21T05:34:19,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] starting 2018-07-21T05:34:19,415 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,415 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] Got response: 204 No Content 2018-07-21T05:34:19,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] response is [] 2018-07-21T05:34:19,415 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] starting 2018-07-21T05:34:19,416 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,416 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] Got response: 204 No Content 2018-07-21T05:34:19,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] response is [] 2018-07-21T05:34:19,416 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] starting 2018-07-21T05:34:19,416 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,416 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] Got response: 204 No Content 2018-07-21T05:34:19,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] response is [] 2018-07-21T05:34:19,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] starting 2018-07-21T05:34:19,417 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,417 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] Got response: 204 No Content 2018-07-21T05:34:19,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] response is [] 2018-07-21T05:34:19,417 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] starting 2018-07-21T05:34:19,418 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,418 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] Got response: 204 No Content 2018-07-21T05:34:19,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] response is [] 2018-07-21T05:34:19,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] starting 2018-07-21T05:34:19,419 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,419 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] Got response: 204 No Content 2018-07-21T05:34:19,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] 
response is [] 2018-07-21T05:34:19,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] starting 2018-07-21T05:34:19,419 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,419 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] Got response: 204 No Content 2018-07-21T05:34:19,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] response is [] 2018-07-21T05:34:19,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] starting 2018-07-21T05:34:19,420 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,420 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] Got response: 204 No Content 2018-07-21T05:34:19,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] response is [] 2018-07-21T05:34:19,420 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] starting 2018-07-21T05:34:19,421 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,421 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] Got response: 204 No Content 2018-07-21T05:34:19,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] response is [] 2018-07-21T05:34:19,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] starting 2018-07-21T05:34:19,422 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,422 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] Got response: 204 No Content 2018-07-21T05:34:19,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] response is [] 2018-07-21T05:34:19,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] starting 2018-07-21T05:34:19,422 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,422 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] Got response: 204 No Content 2018-07-21T05:34:19,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] response is [] 2018-07-21T05:34:19,422 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] starting 2018-07-21T05:34:19,423 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,423 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] Got response: 204 No Content 2018-07-21T05:34:19,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] response is [] 2018-07-21T05:34:19,423 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] starting 2018-07-21T05:34:19,424 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,424 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] Got response: 204 No Content 2018-07-21T05:34:19,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] response 
is [] 2018-07-21T05:34:19,424 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] starting 2018-07-21T05:34:19,425 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,425 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] Got response: 204 No Content 2018-07-21T05:34:19,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] response is [] 2018-07-21T05:34:19,425 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] starting 2018-07-21T05:34:19,426 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,426 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] Got response: 204 No Content 2018-07-21T05:34:19,426 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] response is [] 2018-07-21T05:34:19,426 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] starting 2018-07-21T05:34:19,427 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,427 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] Got response: 204 No Content 2018-07-21T05:34:19,427 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] response is [] 2018-07-21T05:34:19,427 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:34:19,428 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,428 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:34:19,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is [] 2018-07-21T05:34:19,428 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] starting 2018-07-21T05:34:19,429 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,429 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] Got response: 204 No Content 2018-07-21T05:34:19,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] response is [] 2018-07-21T05:34:19,429 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:34:19,430 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,430 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:34:19,430 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:34:19,430 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] starting 2018-07-21T05:34:19,431 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,431 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] Got response: 204 No Content 2018-07-21T05:34:19,431 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] response 
is [] 2018-07-21T05:34:19,431 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] starting 2018-07-21T05:34:19,432 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,432 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] Got response: 204 No Content 2018-07-21T05:34:19,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] response is [] 2018-07-21T05:34:19,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:34:19,432 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,432 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:34:19,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:34:19,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:34:19,433 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,433 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:34:19,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is [] 2018-07-21T05:34:19,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting 2018-07-21T05:34:19,434 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,434 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content 2018-07-21T05:34:19,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is [] 2018-07-21T05:34:19,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:34:19,435 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,435 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:34:19,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:34:19,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:34:19,436 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,436 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:34:19,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:34:19,436 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting 2018-07-21T05:34:19,437 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,437 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content 2018-07-21T05:34:19,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] 
response is [] 2018-07-21T05:34:19,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting 2018-07-21T05:34:19,437 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,437 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content 2018-07-21T05:34:19,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is [] 2018-07-21T05:34:19,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting 2018-07-21T05:34:19,438 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,438 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content 2018-07-21T05:34:19,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is [] 2018-07-21T05:34:19,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] starting 2018-07-21T05:34:19,439 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,439 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] Got response: 204 No Content 2018-07-21T05:34:19,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] response is [] 2018-07-21T05:34:19,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] starting 2018-07-21T05:34:19,439 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,439 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] Got response: 204 No Content 2018-07-21T05:34:19,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] response is [] 2018-07-21T05:34:19,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] starting 2018-07-21T05:34:19,440 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,440 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] Got response: 204 No Content 2018-07-21T05:34:19,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] response is [] 2018-07-21T05:34:19,440 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] starting 2018-07-21T05:34:19,441 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,441 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] Got response: 204 No Content 2018-07-21T05:34:19,441 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] response is [] 2018-07-21T05:34:19,441 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] starting 2018-07-21T05:34:19,441 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,441 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] Got response: 204 No Content 2018-07-21T05:34:19,441 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] response 
is [] 2018-07-21T05:34:19,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] starting 2018-07-21T05:34:19,442 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,442 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] Got response: 204 No Content 2018-07-21T05:34:19,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] response is [] 2018-07-21T05:34:19,442 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] starting 2018-07-21T05:34:19,443 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,443 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] Got response: 204 No Content 2018-07-21T05:34:19,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] response is [] 2018-07-21T05:34:19,443 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] starting 2018-07-21T05:34:19,444 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,444 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] Got response: 204 No Content 2018-07-21T05:34:19,444 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] response is [] 2018-07-21T05:34:19,444 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] starting 2018-07-21T05:34:19,444 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,444 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] Got response: 204 No Content 2018-07-21T05:34:19,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] response is [] 2018-07-21T05:34:19,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] starting 2018-07-21T05:34:19,446 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,446 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] Got response: 204 No Content 2018-07-21T05:34:19,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] response is [] 2018-07-21T05:34:19,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] starting 2018-07-21T05:34:19,446 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,446 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] Got response: 204 No Content 2018-07-21T05:34:19,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] response is [] 2018-07-21T05:34:19,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:34:19,447 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,447 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:34:19,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 
2018-07-21T05:34:19,447 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:34:19,448 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,448 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:34:19,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 2018-07-21T05:34:19,448 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:34:19,449 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,449 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:34:19,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:34:19,449 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:34:19,450 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,450 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:34:19,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 2018-07-21T05:34:19,450 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:34:19,451 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,451 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:34:19,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 2018-07-21T05:34:19,451 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting 2018-07-21T05:34:19,451 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,451 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content 2018-07-21T05:34:19,452 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is [] 2018-07-21T05:34:19,452 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting 2018-07-21T05:34:19,452 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,452 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content 2018-07-21T05:34:19,452 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is [] 2018-07-21T05:34:19,452 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting 2018-07-21T05:34:19,453 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,453 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content 2018-07-21T05:34:19,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is [] 
2018-07-21T05:34:19,453 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting 2018-07-21T05:34:19,454 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,454 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content 2018-07-21T05:34:19,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is [] 2018-07-21T05:34:19,454 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting 2018-07-21T05:34:19,455 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,455 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content 2018-07-21T05:34:19,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is [] 2018-07-21T05:34:19,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting 2018-07-21T05:34:19,455 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,455 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content 2018-07-21T05:34:19,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is [] 2018-07-21T05:34:19,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting 2018-07-21T05:34:19,456 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,456 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content 2018-07-21T05:34:19,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is [] 2018-07-21T05:34:19,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting 2018-07-21T05:34:19,457 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,457 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content 2018-07-21T05:34:19,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is [] 2018-07-21T05:34:19,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting 2018-07-21T05:34:19,457 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,457 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content 2018-07-21T05:34:19,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is [] 2018-07-21T05:34:19,457 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting 2018-07-21T05:34:19,458 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,458 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content 2018-07-21T05:34:19,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is [] 
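Each identifier in these URLs is self-describing: datasource name, interval start, interval end, the version timestamp assigned at indexing time, and a trailing shard number (here hour-long intervals with shards such as _12 through _19). The illustrative parser below splits the underscore-delimited layout visible in this log; Druid's own SegmentId handling is more involved, so treat this as an approximation for log analysis only.

# Illustrative parser for the underscore-delimited segment ids in this log.
# Assumes the datasource is everything before the first ISO-8601 interval
# start and that a trailing "_<digits>" is the shard number.
import re

SEGMENT_ID_RE = re.compile(
    r"^(?P<datasource>.+)_"
    r"(?P<start>\d{4}-\d{2}-\d{2}T[\d:.]+Z)_"
    r"(?P<end>\d{4}-\d{2}-\d{2}T[\d:.]+Z)_"
    r"(?P<version>.+?)"
    r"(?:_(?P<partition>\d+))?$"
)

def parse_segment_id(segment_id: str) -> dict:
    m = SEGMENT_ID_RE.match(segment_id)
    if m is None:
        raise ValueError(f"unrecognized segment id: {segment_id}")
    parts = m.groupdict()
    parts["partition"] = int(parts["partition"] or 0)  # no suffix means shard 0
    return parts

if __name__ == "__main__":
    print(parse_segment_id(
        "default.druid_max_size_partition_1969-12-31T23:00:00.000Z_"
        "1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3"))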
2018-07-21T05:34:19,458 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting 2018-07-21T05:34:19,459 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,459 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content 2018-07-21T05:34:19,459 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is [] 2018-07-21T05:34:19,459 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting 2018-07-21T05:34:19,459 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,459 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:34:19,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 2018-07-21T05:34:19,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:34:19,460 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,460 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:34:19,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 2018-07-21T05:34:19,460 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:34:19,461 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,461 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:34:19,461 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:34:19,461 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] starting 2018-07-21T05:34:19,462 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,462 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] Got response: 204 No Content 2018-07-21T05:34:19,462 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] response is [] 2018-07-21T05:34:19,462 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] starting 2018-07-21T05:34:19,463 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,463 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] Got response: 204 No Content 2018-07-21T05:34:19,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] response is [] 2018-07-21T05:34:19,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] starting 2018-07-21T05:34:19,463 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,463 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] Got response: 204 No Content 2018-07-21T05:34:19,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] response 
is [] 2018-07-21T05:34:19,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] starting 2018-07-21T05:34:19,464 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,464 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] Got response: 204 No Content 2018-07-21T05:34:19,464 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] response is [] 2018-07-21T05:34:19,464 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] starting 2018-07-21T05:34:19,465 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,465 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] Got response: 204 No Content 2018-07-21T05:34:19,475 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] response is [] 2018-07-21T05:34:19,475 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] starting 2018-07-21T05:34:19,477 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,477 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] Got response: 204 No Content 2018-07-21T05:34:19,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] response is [] 2018-07-21T05:34:19,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] starting 2018-07-21T05:34:19,478 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,478 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] Got response: 204 No Content 2018-07-21T05:34:19,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] response is [] 2018-07-21T05:34:19,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] starting 2018-07-21T05:34:19,479 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,479 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] Got response: 204 No Content 2018-07-21T05:34:19,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] response is [] 2018-07-21T05:34:19,479 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] starting 2018-07-21T05:34:19,480 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,480 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] Got response: 204 No Content 2018-07-21T05:34:19,480 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] response is [] 2018-07-21T05:34:19,480 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] starting 2018-07-21T05:34:19,480 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,480 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] Got response: 204 No Content 2018-07-21T05:34:19,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] response 
is [] 2018-07-21T05:34:19,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:34:19,481 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,481 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:34:19,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 2018-07-21T05:34:19,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting 2018-07-21T05:34:19,482 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,482 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content 2018-07-21T05:34:19,482 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is [] 2018-07-21T05:34:19,482 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:34:19,483 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,483 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:34:19,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 2018-07-21T05:34:19,483 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:34:19,484 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,484 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:34:19,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:34:19,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:34:19,484 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,484 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:34:19,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:34:19,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:34:19,485 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,485 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:34:19,485 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 2018-07-21T05:34:19,485 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting 2018-07-21T05:34:19,486 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,486 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content 2018-07-21T05:34:19,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is [] 
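The single main thread sweeps every shard of the interval this way, one GET at a time, and since every answer so far is 204 the handler must either repeat the sweep or give up. A generic poll-until-served barrier of that shape is sketched below; the 30 s timeout and 1 s back-off are invented values for illustration, not Hive's configuration, and the probe itself is passed in as a callable (for example the segment_is_served sketch above).

# Hedged sketch of the poll-until-served barrier implied by the repeated
# sweeps in this log. Timeout and back-off are illustrative values only.
import time
from typing import Callable, Iterable, Set

def wait_until_served(segment_ids: Iterable[str],
                      is_served: Callable[[str], bool],
                      timeout_s: float = 30.0,
                      poll_interval_s: float = 1.0) -> Set[str]:
    """Poll is_served(id) for every id; return ids still unserved at timeout."""
    pending = set(segment_ids)
    deadline = time.monotonic() + timeout_s
    while pending and time.monotonic() < deadline:
        pending = {sid for sid in pending if not is_served(sid)}
        if pending:
            time.sleep(poll_interval_s)
    return pending

An empty return value means every shard became available in time; a non-empty one lists the segments a test harness could then report as never loaded.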
2018-07-21T05:34:19,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:34:19,487 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,487 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:34:19,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 2018-07-21T05:34:19,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:34:19,487 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,487 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:34:19,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:34:19,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:34:19,488 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,488 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:34:19,488 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 2018-07-21T05:34:19,488 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:34:19,489 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,489 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:34:19,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:34:19,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:34:19,489 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,489 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:34:19,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:34:19,489 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting 2018-07-21T05:34:19,490 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,490 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content 2018-07-21T05:34:19,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is [] 2018-07-21T05:34:19,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:34:19,490 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,490 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:34:19,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 
2018-07-21T05:34:19,490 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:34:19,491 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,492 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:34:19,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 2018-07-21T05:34:19,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:34:19,492 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,492 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:34:19,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 2018-07-21T05:34:19,492 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:34:19,493 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,493 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:34:19,493 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 2018-07-21T05:34:19,493 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting 2018-07-21T05:34:19,494 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,494 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content 2018-07-21T05:34:19,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is [] 2018-07-21T05:34:19,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting 2018-07-21T05:34:19,494 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,494 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content 2018-07-21T05:34:19,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is [] 2018-07-21T05:34:19,494 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting 2018-07-21T05:34:19,495 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,495 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content 2018-07-21T05:34:19,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is [] 2018-07-21T05:34:19,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting 2018-07-21T05:34:19,495 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,495 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content 2018-07-21T05:34:19,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is [] 
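At DEBUG level every shard contributes three near-identical entries (starting, messageReceived, Got response), which makes the raw log hard to scan once the shard count reaches the hundreds. The hypothetical reduction script below collapses the "Got response" entries into per-segment status counts; the line shape is taken from this log, and the hive.log input path is an assumption.

# Hypothetical log-reduction helper: tally HTTP statuses per segment id from
# the "Got response" entries above. The input path is an assumed example.
import re
from collections import Counter

GOT_RESPONSE_RE = re.compile(
    r"client\.NettyHttpClient: \[GET (?P<url>[^\]]+)\] "
    r"Got response: (?P<status>\d{3})"
)

def tally_segment_checks(log_text: str) -> Counter:
    """Count (status, segment id) pairs across all 'Got response' entries."""
    tally = Counter()
    for m in GOT_RESPONSE_RE.finditer(log_text):
        segment_id = m.group("url").rsplit("/", 1)[-1]
        tally[(m.group("status"), segment_id)] += 1
    return tally

if __name__ == "__main__":
    with open("hive.log") as f:  # assumed path to this log
        for (status, seg), count in sorted(tally_segment_checks(f.read()).items()):
            print(f"{status}  x{count}  {seg}")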
2018-07-21T05:34:19,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting 2018-07-21T05:34:19,496 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,496 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content 2018-07-21T05:34:19,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is [] 2018-07-21T05:34:19,496 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting 2018-07-21T05:34:19,497 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,497 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content 2018-07-21T05:34:19,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is [] 2018-07-21T05:34:19,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting 2018-07-21T05:34:19,497 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,497 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content 2018-07-21T05:34:19,497 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is [] 2018-07-21T05:34:19,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting 2018-07-21T05:34:19,498 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,498 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content 2018-07-21T05:34:19,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is [] 2018-07-21T05:34:19,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting 2018-07-21T05:34:19,498 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,498 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content 2018-07-21T05:34:19,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is [] 2018-07-21T05:34:19,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting 2018-07-21T05:34:19,499 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,499 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content 2018-07-21T05:34:19,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is [] 2018-07-21T05:34:19,499 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting 2018-07-21T05:34:19,500 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,500 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content 2018-07-21T05:34:19,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is [] 
2018-07-21T05:34:19,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting 2018-07-21T05:34:19,500 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,500 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content 2018-07-21T05:34:19,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is [] 2018-07-21T05:34:19,500 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting 2018-07-21T05:34:19,501 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,501 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content 2018-07-21T05:34:19,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is [] 2018-07-21T05:34:19,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting 2018-07-21T05:34:19,501 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,501 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content 2018-07-21T05:34:19,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is [] 2018-07-21T05:34:19,501 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting 2018-07-21T05:34:19,502 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,502 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content 2018-07-21T05:34:19,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is [] 2018-07-21T05:34:19,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting 2018-07-21T05:34:19,502 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,503 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content 2018-07-21T05:34:19,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is [] 2018-07-21T05:34:19,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting 2018-07-21T05:34:19,503 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,503 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content 2018-07-21T05:34:19,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is [] 2018-07-21T05:34:19,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting 2018-07-21T05:34:19,504 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,504 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content 2018-07-21T05:34:19,504 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is [] 
2018-07-21T05:34:19,504 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting 2018-07-21T05:34:19,505 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,505 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content 2018-07-21T05:34:19,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is [] 2018-07-21T05:34:19,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting 2018-07-21T05:34:19,506 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,506 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content 2018-07-21T05:34:19,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is [] 2018-07-21T05:34:19,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting 2018-07-21T05:34:19,506 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,506 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content 2018-07-21T05:34:19,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is [] 2018-07-21T05:34:19,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting 2018-07-21T05:34:19,507 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,507 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content 2018-07-21T05:34:19,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is [] 2018-07-21T05:34:19,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting 2018-07-21T05:34:19,508 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:19 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:19,508 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content 2018-07-21T05:34:19,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is [] 2018-07-21T05:34:49,239 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:34:49,301 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:34:49,509 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting 2018-07-21T05:34:49,510 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,510 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content 2018-07-21T05:34:49,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is [] 2018-07-21T05:34:49,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting 2018-07-21T05:34:49,511 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,511 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content 2018-07-21T05:34:49,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be 
main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is [] 2018-07-21T05:34:49,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting 2018-07-21T05:34:49,512 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,512 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content 2018-07-21T05:34:49,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is [] 2018-07-21T05:34:49,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting 2018-07-21T05:34:49,513 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,513 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content 2018-07-21T05:34:49,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is [] 2018-07-21T05:34:49,513 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] 
starting 2018-07-21T05:34:49,514 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,514 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content 2018-07-21T05:34:49,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is [] 2018-07-21T05:34:49,514 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting 2018-07-21T05:34:49,515 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,515 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content 2018-07-21T05:34:49,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is [] 2018-07-21T05:34:49,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting 2018-07-21T05:34:49,515 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,515 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content 2018-07-21T05:34:49,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is [] 2018-07-21T05:34:49,515 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting 2018-07-21T05:34:49,516 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,516 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content 2018-07-21T05:34:49,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is [] 2018-07-21T05:34:49,516 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting 2018-07-21T05:34:49,517 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,517 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content 2018-07-21T05:34:49,517 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is [] 
2018-07-21T05:34:49,517 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] starting 2018-07-21T05:34:49,518 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,518 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] Got response: 204 No Content 2018-07-21T05:34:49,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] response is [] 2018-07-21T05:34:49,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] starting 2018-07-21T05:34:49,518 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,518 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] Got response: 204 No Content 2018-07-21T05:34:49,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] response is [] 2018-07-21T05:34:49,518 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting 2018-07-21T05:34:49,519 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,519 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content 2018-07-21T05:34:49,519 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is [] 2018-07-21T05:34:49,519 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] starting 2018-07-21T05:34:49,520 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,520 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] Got response: 204 No Content 2018-07-21T05:34:49,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] response is [] 2018-07-21T05:34:49,520 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting 2018-07-21T05:34:49,521 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,521 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content 2018-07-21T05:34:49,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is [] 2018-07-21T05:34:49,521 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting 2018-07-21T05:34:49,522 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,522 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content 2018-07-21T05:34:49,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response is [] 2018-07-21T05:34:49,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] starting 2018-07-21T05:34:49,522 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,522 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] Got response: 204 No Content 2018-07-21T05:34:49,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] response 
is [] 2018-07-21T05:34:49,522 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] starting 2018-07-21T05:34:49,523 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,523 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] Got response: 204 No Content 2018-07-21T05:34:49,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] response is [] 2018-07-21T05:34:49,523 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting 2018-07-21T05:34:49,524 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,524 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content 2018-07-21T05:34:49,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is [] 2018-07-21T05:34:49,524 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting 2018-07-21T05:34:49,524 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,524 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content 2018-07-21T05:34:49,525 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is [] 2018-07-21T05:34:49,525 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] starting 2018-07-21T05:34:49,525 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,525 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] Got response: 204 No Content 2018-07-21T05:34:49,525 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] response is [] 2018-07-21T05:34:49,525 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] starting 2018-07-21T05:34:49,526 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,526 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] Got response: 204 No Content 2018-07-21T05:34:49,526 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] response is [] 2018-07-21T05:34:49,526 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting 2018-07-21T05:34:49,527 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,527 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content 2018-07-21T05:34:49,527 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is [] 2018-07-21T05:34:49,527 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting 2018-07-21T05:34:49,527 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,527 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content 2018-07-21T05:34:49,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is [] 
2018-07-21T05:34:49,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] starting 2018-07-21T05:34:49,528 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,528 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] Got response: 204 No Content 2018-07-21T05:34:49,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] response is [] 2018-07-21T05:34:49,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] starting 2018-07-21T05:34:49,529 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,529 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] Got response: 204 No Content 2018-07-21T05:34:49,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] response is [] 2018-07-21T05:34:49,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] starting 2018-07-21T05:34:49,529 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,529 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] Got response: 204 No Content 2018-07-21T05:34:49,529 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] response is [] 2018-07-21T05:34:49,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:34:49,530 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,530 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:34:49,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 2018-07-21T05:34:49,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:34:49,531 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,531 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:34:49,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 2018-07-21T05:34:49,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:34:49,532 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,532 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:34:49,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:34:49,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] starting 2018-07-21T05:34:49,533 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,533 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] Got response: 204 No Content 2018-07-21T05:34:49,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] response is 
[] 2018-07-21T05:34:49,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] starting 2018-07-21T05:34:49,533 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,533 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] Got response: 204 No Content 2018-07-21T05:34:49,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] response is [] 2018-07-21T05:34:49,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:34:49,534 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,534 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:34:49,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:34:49,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:34:49,540 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,540 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:34:49,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:34:49,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] starting 2018-07-21T05:34:49,541 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,541 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] Got response: 204 No Content 2018-07-21T05:34:49,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] response is [] 2018-07-21T05:34:49,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] starting 2018-07-21T05:34:49,542 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,542 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] Got response: 204 No Content 2018-07-21T05:34:49,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] response is [] 2018-07-21T05:34:49,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] starting 2018-07-21T05:34:49,542 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,542 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] Got response: 204 No Content 2018-07-21T05:34:49,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] response is [] 2018-07-21T05:34:49,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:34:49,543 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,543 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:34:49,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response 
is [] 2018-07-21T05:34:49,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting 2018-07-21T05:34:49,544 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,544 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content 2018-07-21T05:34:49,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is [] 2018-07-21T05:34:49,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] starting 2018-07-21T05:34:49,545 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,545 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] Got response: 204 No Content 2018-07-21T05:34:49,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] response is [] 2018-07-21T05:34:49,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] starting 2018-07-21T05:34:49,546 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,546 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] Got response: 204 No Content 2018-07-21T05:34:49,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] response is [] 2018-07-21T05:34:49,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] starting 2018-07-21T05:34:49,546 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,546 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] Got response: 204 No Content 2018-07-21T05:34:49,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] response is [] 2018-07-21T05:34:49,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] starting 2018-07-21T05:34:49,547 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,547 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] Got response: 204 No Content 2018-07-21T05:34:49,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] response is [] 2018-07-21T05:34:49,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] starting 2018-07-21T05:34:49,548 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,548 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] Got response: 204 No Content 2018-07-21T05:34:49,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] response is [] 2018-07-21T05:34:49,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] starting 2018-07-21T05:34:49,549 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,549 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] Got response: 204 No Content 2018-07-21T05:34:49,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] response 
is [] 2018-07-21T05:34:49,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] starting 2018-07-21T05:34:49,549 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,549 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] Got response: 204 No Content 2018-07-21T05:34:49,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] response is [] 2018-07-21T05:34:49,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] starting 2018-07-21T05:34:49,550 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,550 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] Got response: 204 No Content 2018-07-21T05:34:49,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] response is [] 2018-07-21T05:34:49,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] starting 2018-07-21T05:34:49,550 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,551 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] Got response: 204 No Content 2018-07-21T05:34:49,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] response is [] 2018-07-21T05:34:49,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:34:49,551 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,551 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:34:49,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is [] 2018-07-21T05:34:49,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] starting 2018-07-21T05:34:49,552 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,552 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] Got response: 204 No Content 2018-07-21T05:34:49,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] response is [] 2018-07-21T05:34:49,552 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:34:49,553 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,553 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:34:49,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:34:49,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] starting 2018-07-21T05:34:49,554 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,554 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] Got response: 204 No Content 2018-07-21T05:34:49,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] response is [] 
2018-07-21T05:34:49,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] starting 2018-07-21T05:34:49,555 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,555 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] Got response: 204 No Content 2018-07-21T05:34:49,555 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] response is [] 2018-07-21T05:34:49,555 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:34:49,556 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,556 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:34:49,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 2018-07-21T05:34:49,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:34:49,556 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,556 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:34:49,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is [] 2018-07-21T05:34:49,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:34:49,557 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,557 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:34:49,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is [] 2018-07-21T05:34:49,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] starting 2018-07-21T05:34:49,558 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,558 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] Got response: 204 No Content 2018-07-21T05:34:49,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] response is [] 2018-07-21T05:34:49,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting 2018-07-21T05:34:49,558 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,558 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content 2018-07-21T05:34:49,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is [] 2018-07-21T05:34:49,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:34:49,559 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,559 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:34:49,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 
[05:34:49,559 - 05:34:49,587: the same four-entry polling sequence (client.NettyHttpClient "[GET <segment URL>] starting", "[GET <segment URL>] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)", "[GET <segment URL>] Got response: 204 No Content", then druid.DruidStorageHandler "Checking segment [<segment URL>] response is []") repeats for each remaining default.druid_max_size_partition segment: shards 74, 73, 76, 75, 78, 77, and 79 of interval 1969-12-31T23:00:00.000Z/1970-01-01T00:00:00.000Z, and shards 89, 86, 370, 85, 490, 88, 130, 251, 372, 87, 250, 371, 82, 132, 253, 374, 81, 131, 252, 373, 84, 134, 255, 376, 83, 133, 254, 375, 136, 257, 378, 135, 256, 377, 80, 138, 259, 137, and 258 of interval 1970-01-01T00:00:00.000Z/1970-01-01T01:00:00.000Z; every GET returned 204 No Content with an empty response body.]
2018-07-21T05:34:49,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] starting
2018-07-21T05:34:49,587 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,587 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] Got response: 204 No Content 2018-07-21T05:34:49,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] response is [] 2018-07-21T05:34:49,587 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting 2018-07-21T05:34:49,587 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,587 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content 2018-07-21T05:34:49,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is [] 2018-07-21T05:34:49,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:34:49,588 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,588 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:34:49,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is [] 
2018-07-21T05:34:49,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] starting 2018-07-21T05:34:49,589 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,589 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] Got response: 204 No Content 2018-07-21T05:34:49,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] response is [] 2018-07-21T05:34:49,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting 2018-07-21T05:34:49,589 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,589 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content 2018-07-21T05:34:49,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is [] 2018-07-21T05:34:49,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:34:49,590 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,590 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:34:49,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 2018-07-21T05:34:49,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] starting 2018-07-21T05:34:49,590 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,590 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] Got response: 204 No Content 2018-07-21T05:34:49,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] response is [] 2018-07-21T05:34:49,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] starting 2018-07-21T05:34:49,591 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,591 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] Got response: 204 No Content 2018-07-21T05:34:49,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] response is [] 2018-07-21T05:34:49,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting 2018-07-21T05:34:49,592 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,592 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content 2018-07-21T05:34:49,592 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is [] 2018-07-21T05:34:49,592 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting 2018-07-21T05:34:49,592 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,592 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content 2018-07-21T05:34:49,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is [] 
2018-07-21T05:34:49,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting 2018-07-21T05:34:49,593 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,593 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content 2018-07-21T05:34:49,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is [] 2018-07-21T05:34:49,593 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting 2018-07-21T05:34:49,594 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,594 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content 2018-07-21T05:34:49,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is [] 2018-07-21T05:34:49,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting 2018-07-21T05:34:49,594 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,594 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content 2018-07-21T05:34:49,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is [] 2018-07-21T05:34:49,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting 2018-07-21T05:34:49,595 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,595 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content 2018-07-21T05:34:49,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is [] 2018-07-21T05:34:49,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting 2018-07-21T05:34:49,596 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,596 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content 2018-07-21T05:34:49,596 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is [] 2018-07-21T05:34:49,596 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting 2018-07-21T05:34:49,596 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,596 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content 2018-07-21T05:34:49,596 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is [] 2018-07-21T05:34:49,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting 2018-07-21T05:34:49,597 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,597 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content 2018-07-21T05:34:49,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is [] 
2018-07-21T05:34:49,597 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting 2018-07-21T05:34:49,598 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,598 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content 2018-07-21T05:34:49,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is [] 2018-07-21T05:34:49,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] starting 2018-07-21T05:34:49,599 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,599 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] Got response: 204 No Content 2018-07-21T05:34:49,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] response is [] 2018-07-21T05:34:49,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting 2018-07-21T05:34:49,599 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,599 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content 2018-07-21T05:34:49,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is [] 2018-07-21T05:34:49,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting 2018-07-21T05:34:49,600 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,600 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content 2018-07-21T05:34:49,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is [] 2018-07-21T05:34:49,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] starting 2018-07-21T05:34:49,601 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,601 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] Got response: 204 No Content 2018-07-21T05:34:49,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] response is [] 2018-07-21T05:34:49,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] starting 2018-07-21T05:34:49,601 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,602 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] Got response: 204 No Content 2018-07-21T05:34:49,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] response is [] 2018-07-21T05:34:49,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] starting 2018-07-21T05:34:49,602 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,602 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] Got response: 204 No Content 2018-07-21T05:34:49,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] response 
is [] 2018-07-21T05:34:49,602 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting 2018-07-21T05:34:49,603 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,603 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content 2018-07-21T05:34:49,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is [] 2018-07-21T05:34:49,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] starting 2018-07-21T05:34:49,603 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,603 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] Got response: 204 No Content 2018-07-21T05:34:49,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] response is [] 2018-07-21T05:34:49,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] starting 2018-07-21T05:34:49,604 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,604 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] Got response: 204 No Content 2018-07-21T05:34:49,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] response is [] 2018-07-21T05:34:49,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting 2018-07-21T05:34:49,605 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,605 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content 2018-07-21T05:34:49,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is [] 2018-07-21T05:34:49,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:34:49,605 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,605 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:34:49,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 2018-07-21T05:34:49,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting 2018-07-21T05:34:49,606 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,606 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content 2018-07-21T05:34:49,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is [] 2018-07-21T05:34:49,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] starting 2018-07-21T05:34:49,607 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,607 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] Got response: 204 No Content 2018-07-21T05:34:49,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] response 
is [] 2018-07-21T05:34:49,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] starting 2018-07-21T05:34:49,607 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,607 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] Got response: 204 No Content 2018-07-21T05:34:49,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] response is [] 2018-07-21T05:34:49,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] starting 2018-07-21T05:34:49,608 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,608 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] Got response: 204 No Content 2018-07-21T05:34:49,608 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] response is [] 2018-07-21T05:34:49,608 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting 2018-07-21T05:34:49,608 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,608 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content 2018-07-21T05:34:49,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is [] 2018-07-21T05:34:49,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:34:49,609 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,609 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:34:49,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 2018-07-21T05:34:49,609 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting 2018-07-21T05:34:49,610 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,610 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content 2018-07-21T05:34:49,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is [] 2018-07-21T05:34:49,610 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] starting 2018-07-21T05:34:49,611 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,611 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] Got response: 204 No Content 2018-07-21T05:34:49,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_241] response is [] 2018-07-21T05:34:49,611 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] starting 2018-07-21T05:34:49,612 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,612 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] Got response: 204 No Content 2018-07-21T05:34:49,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_362] response 
is [] 2018-07-21T05:34:49,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] starting 2018-07-21T05:34:49,612 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,613 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] Got response: 204 No Content 2018-07-21T05:34:49,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_483] response is [] 2018-07-21T05:34:49,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting 2018-07-21T05:34:49,617 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,617 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content 2018-07-21T05:34:49,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is [] 2018-07-21T05:34:49,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting 2018-07-21T05:34:49,618 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,618 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content 2018-07-21T05:34:49,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is [] 2018-07-21T05:34:49,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] starting 2018-07-21T05:34:49,619 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,619 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] Got response: 204 No Content 2018-07-21T05:34:49,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_244] response is [] 2018-07-21T05:34:49,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] starting 2018-07-21T05:34:49,620 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,620 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] Got response: 204 No Content 2018-07-21T05:34:49,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_365] response is [] 2018-07-21T05:34:49,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] starting 2018-07-21T05:34:49,620 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,620 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] Got response: 204 No Content 2018-07-21T05:34:49,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_486] response is [] 2018-07-21T05:34:49,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting 2018-07-21T05:34:49,621 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,621 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content 2018-07-21T05:34:49,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is [] 
2018-07-21T05:34:49,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting 2018-07-21T05:34:49,622 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,622 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content 2018-07-21T05:34:49,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is [] 2018-07-21T05:34:49,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] starting 2018-07-21T05:34:49,622 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,622 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] Got response: 204 No Content 2018-07-21T05:34:49,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_243] response is [] 2018-07-21T05:34:49,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] starting 2018-07-21T05:34:49,623 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,623 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] Got response: 204 No Content 2018-07-21T05:34:49,623 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_364] response is [] 2018-07-21T05:34:49,623 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] starting 2018-07-21T05:34:49,624 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,624 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] Got response: 204 No Content 2018-07-21T05:34:49,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_485] response is [] 2018-07-21T05:34:49,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting 2018-07-21T05:34:49,624 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,624 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content 2018-07-21T05:34:49,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is [] 2018-07-21T05:34:49,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] starting 2018-07-21T05:34:49,625 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,625 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] Got response: 204 No Content 2018-07-21T05:34:49,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_246] response is [] 2018-07-21T05:34:49,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] starting 2018-07-21T05:34:49,626 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,626 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] Got response: 204 No Content 2018-07-21T05:34:49,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_367] 
response is [] 2018-07-21T05:34:49,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] starting 2018-07-21T05:34:49,626 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,626 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] Got response: 204 No Content 2018-07-21T05:34:49,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_488] response is [] 2018-07-21T05:34:49,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting 2018-07-21T05:34:49,627 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,627 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content 2018-07-21T05:34:49,627 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is [] 2018-07-21T05:34:49,627 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] starting 2018-07-21T05:34:49,627 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,627 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] Got response: 204 No Content 2018-07-21T05:34:49,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_245] response is [] 2018-07-21T05:34:49,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] starting 2018-07-21T05:34:49,631 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,631 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] Got response: 204 No Content 2018-07-21T05:34:49,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_366] response is [] 2018-07-21T05:34:49,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] starting 2018-07-21T05:34:49,632 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,632 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] Got response: 204 No Content 2018-07-21T05:34:49,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_487] response is [] 2018-07-21T05:34:49,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting 2018-07-21T05:34:49,633 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,633 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content 2018-07-21T05:34:49,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is [] 2018-07-21T05:34:49,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] starting 2018-07-21T05:34:49,633 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,634 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] Got response: 204 No Content 2018-07-21T05:34:49,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_248] response 
is [] 2018-07-21T05:34:49,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] starting 2018-07-21T05:34:49,634 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,634 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] Got response: 204 No Content 2018-07-21T05:34:49,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_369] response is [] 2018-07-21T05:34:49,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting 2018-07-21T05:34:49,635 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,635 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content 2018-07-21T05:34:49,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is [] 2018-07-21T05:34:49,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] starting 2018-07-21T05:34:49,635 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,635 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] Got response: 204 No Content 2018-07-21T05:34:49,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_247] response is [] 2018-07-21T05:34:49,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] starting 2018-07-21T05:34:49,636 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,636 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] Got response: 204 No Content 2018-07-21T05:34:49,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_368] response is [] 2018-07-21T05:34:49,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] starting 2018-07-21T05:34:49,637 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,637 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] Got response: 204 No Content 2018-07-21T05:34:49,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_489] response is [] 2018-07-21T05:34:49,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:34:49,637 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,637 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:34:49,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 2018-07-21T05:34:49,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting 2018-07-21T05:34:49,638 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,638 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content 2018-07-21T05:34:49,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is [] 
2018-07-21T05:34:49,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] starting 2018-07-21T05:34:49,638 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,638 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] Got response: 204 No Content 2018-07-21T05:34:49,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_228] response is [] 2018-07-21T05:34:49,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] starting 2018-07-21T05:34:49,639 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,639 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] Got response: 204 No Content 2018-07-21T05:34:49,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_349] response is [] 2018-07-21T05:34:49,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:34:49,639 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,639 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:34:49,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is [] 2018-07-21T05:34:49,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting 2018-07-21T05:34:49,640 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,640 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content 2018-07-21T05:34:49,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is [] 2018-07-21T05:34:49,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] starting 2018-07-21T05:34:49,640 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,640 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] Got response: 204 No Content
2018-07-21T05:34:49,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_227] response is []
2018-07-21T05:34:49,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] starting
2018-07-21T05:34:49,641 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,641 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] Got response: 204 No Content
2018-07-21T05:34:49,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_348] response is []
2018-07-21T05:34:49,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] starting
2018-07-21T05:34:49,643 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,643 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] Got response: 204 No Content
2018-07-21T05:34:49,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_469] response is []
2018-07-21T05:34:49,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting
2018-07-21T05:34:49,644 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,644 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content
2018-07-21T05:34:49,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is []
2018-07-21T05:34:49,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting
2018-07-21T05:34:49,645 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,645 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content
2018-07-21T05:34:49,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is []
2018-07-21T05:34:49,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting
2018-07-21T05:34:49,645 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,646 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content
2018-07-21T05:34:49,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is []
2018-07-21T05:34:49,647 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting
2018-07-21T05:34:49,647 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,648 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content
2018-07-21T05:34:49,648 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is []
2018-07-21T05:34:49,648 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] starting
2018-07-21T05:34:49,648 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,648 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] Got response: 204 No Content
2018-07-21T05:34:49,648 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_229] response is []
2018-07-21T05:34:49,648 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting
2018-07-21T05:34:49,649 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,649 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content
2018-07-21T05:34:49,649 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is []
2018-07-21T05:34:49,649 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting
2018-07-21T05:34:49,649 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,649 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content
2018-07-21T05:34:49,649 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is []
2018-07-21T05:34:49,649 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting
2018-07-21T05:34:49,650 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,650 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content
2018-07-21T05:34:49,650 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is []
2018-07-21T05:34:49,650 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting
2018-07-21T05:34:49,650 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,650 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content
2018-07-21T05:34:49,650 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is []
2018-07-21T05:34:49,650 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting
2018-07-21T05:34:49,651 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,651 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content
2018-07-21T05:34:49,651 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is []
2018-07-21T05:34:49,651 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting
2018-07-21T05:34:49,651 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,651 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content
2018-07-21T05:34:49,651 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is []
2018-07-21T05:34:49,652 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting
2018-07-21T05:34:49,652 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,652 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content
2018-07-21T05:34:49,652 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is []
2018-07-21T05:34:49,652 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting
2018-07-21T05:34:49,653 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,653 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content
2018-07-21T05:34:49,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is []
2018-07-21T05:34:49,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting
2018-07-21T05:34:49,653 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,653 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content
2018-07-21T05:34:49,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is []
2018-07-21T05:34:49,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting
2018-07-21T05:34:49,654 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,654 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content
2018-07-21T05:34:49,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is []
2018-07-21T05:34:49,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting
2018-07-21T05:34:49,654 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,654 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content
2018-07-21T05:34:49,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is []
2018-07-21T05:34:49,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] starting
2018-07-21T05:34:49,655 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,655 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] Got response: 204 No Content
2018-07-21T05:34:49,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] response is []
2018-07-21T05:34:49,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] starting
2018-07-21T05:34:49,656 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,656 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] Got response: 204 No Content
2018-07-21T05:34:49,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] response is []
2018-07-21T05:34:49,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting
2018-07-21T05:34:49,656 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,656 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content
2018-07-21T05:34:49,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is []
2018-07-21T05:34:49,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] starting
2018-07-21T05:34:49,657 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,657 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] Got response: 204 No Content
2018-07-21T05:34:49,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] response is []
2018-07-21T05:34:49,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting
2018-07-21T05:34:49,658 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,659 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content
2018-07-21T05:34:49,659 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is []
2018-07-21T05:34:49,659 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting
2018-07-21T05:34:49,659 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,659 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content
2018-07-21T05:34:49,659 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is []
2018-07-21T05:34:49,659 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] starting
2018-07-21T05:34:49,660 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,660 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] Got response: 204 No Content
2018-07-21T05:34:49,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] response is []
2018-07-21T05:34:49,660 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] starting
2018-07-21T05:34:49,661 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,661 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] Got response: 204 No Content
2018-07-21T05:34:49,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] response is []
2018-07-21T05:34:49,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] starting
2018-07-21T05:34:49,661 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,661 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] Got response: 204 No Content
2018-07-21T05:34:49,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] response is []
2018-07-21T05:34:49,661 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting
2018-07-21T05:34:49,662 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,662 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content
2018-07-21T05:34:49,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is []
2018-07-21T05:34:49,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] starting
2018-07-21T05:34:49,662 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,662 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] Got response: 204 No Content
2018-07-21T05:34:49,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] response is []
2018-07-21T05:34:49,662 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] starting
2018-07-21T05:34:49,663 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,663 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] Got response: 204 No Content
2018-07-21T05:34:49,663 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] response is []
2018-07-21T05:34:49,663 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] starting
2018-07-21T05:34:49,664 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,664 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] Got response: 204 No Content
2018-07-21T05:34:49,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] response is []
2018-07-21T05:34:49,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting
2018-07-21T05:34:49,664 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,664 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content
2018-07-21T05:34:49,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is []
2018-07-21T05:34:49,664 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting
2018-07-21T05:34:49,665 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,665 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content
2018-07-21T05:34:49,665 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is []
2018-07-21T05:34:49,665 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] starting
2018-07-21T05:34:49,665 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,665 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] Got response: 204 No Content
2018-07-21T05:34:49,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] response is []
2018-07-21T05:34:49,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] starting
2018-07-21T05:34:49,666 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,666 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] Got response: 204 No Content
2018-07-21T05:34:49,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] response is []
2018-07-21T05:34:49,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] starting
2018-07-21T05:34:49,667 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,667 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] Got response: 204 No Content
2018-07-21T05:34:49,667 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] response is []
2018-07-21T05:34:49,667 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting
2018-07-21T05:34:49,667 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,667 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content
2018-07-21T05:34:49,667 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is []
2018-07-21T05:34:49,667 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting
2018-07-21T05:34:49,668 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,668 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content
2018-07-21T05:34:49,668 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is []
2018-07-21T05:34:49,668 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] starting
2018-07-21T05:34:49,668 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,668 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] Got response: 204 No Content
2018-07-21T05:34:49,668 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] response is []
2018-07-21T05:34:49,668 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] starting
2018-07-21T05:34:49,669 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,669 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] Got response: 204 No Content
2018-07-21T05:34:49,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] response is []
2018-07-21T05:34:49,669 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] starting
2018-07-21T05:34:49,669 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,669 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] Got response: 204 No Content
2018-07-21T05:34:49,670 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] response is []
2018-07-21T05:34:49,670 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting
2018-07-21T05:34:49,671 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,671 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content
2018-07-21T05:34:49,671 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is []
2018-07-21T05:34:49,671 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] starting
2018-07-21T05:34:49,671 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,672 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] Got response: 204 No Content
2018-07-21T05:34:49,672 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] response is []
2018-07-21T05:34:49,672 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] starting
2018-07-21T05:34:49,672 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,672 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] Got response: 204 No Content
2018-07-21T05:34:49,672 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] response is []
2018-07-21T05:34:49,672 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] starting
2018-07-21T05:34:49,673 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,673 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] Got response: 204 No Content
2018-07-21T05:34:49,673 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] response is []
2018-07-21T05:34:49,673 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting
2018-07-21T05:34:49,674 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,674 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content
2018-07-21T05:34:49,674 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is []
2018-07-21T05:34:49,674 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] starting
2018-07-21T05:34:49,674 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,674 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] Got response: 204 No Content
2018-07-21T05:34:49,675 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] response is []
2018-07-21T05:34:49,675 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] starting
2018-07-21T05:34:49,675 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,675 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] Got response: 204 No Content
2018-07-21T05:34:49,675 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] response is []
2018-07-21T05:34:49,675 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] starting
2018-07-21T05:34:49,676 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,676 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] Got response: 204 No Content 2018-07-21T05:34:49,676 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] response is [] 2018-07-21T05:34:49,676 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting 2018-07-21T05:34:49,677 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,677 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content 2018-07-21T05:34:49,677 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is [] 2018-07-21T05:34:49,677 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] starting 2018-07-21T05:34:49,677 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,677 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] Got response: 204 No Content 2018-07-21T05:34:49,677 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] response 
is [] 2018-07-21T05:34:49,677 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] starting 2018-07-21T05:34:49,678 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,678 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] Got response: 204 No Content 2018-07-21T05:34:49,678 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] response is [] 2018-07-21T05:34:49,678 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] starting 2018-07-21T05:34:49,679 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,679 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] Got response: 204 No Content 2018-07-21T05:34:49,679 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] response is [] 2018-07-21T05:34:49,679 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting 2018-07-21T05:34:49,679 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,679 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content 2018-07-21T05:34:49,679 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is [] 2018-07-21T05:34:49,679 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] starting 2018-07-21T05:34:49,680 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,680 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] Got response: 204 No Content 2018-07-21T05:34:49,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] response is [] 2018-07-21T05:34:49,680 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] starting 2018-07-21T05:34:49,681 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,681 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] Got response: 204 No Content 2018-07-21T05:34:49,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] response is [] 2018-07-21T05:34:49,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] starting 2018-07-21T05:34:49,681 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,681 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] Got response: 204 No Content 2018-07-21T05:34:49,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] response is [] 2018-07-21T05:34:49,681 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting 2018-07-21T05:34:49,682 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,682 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content 2018-07-21T05:34:49,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is [] 
2018-07-21T05:34:49,682 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting 2018-07-21T05:34:49,683 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,683 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content 2018-07-21T05:34:49,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is [] 2018-07-21T05:34:49,683 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] starting 2018-07-21T05:34:49,684 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,684 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] Got response: 204 No Content 2018-07-21T05:34:49,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] response is [] 2018-07-21T05:34:49,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] starting 2018-07-21T05:34:49,684 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,684 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] Got response: 204 No Content 2018-07-21T05:34:49,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] response is [] 2018-07-21T05:34:49,684 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] starting 2018-07-21T05:34:49,685 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,685 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] Got response: 204 No Content 2018-07-21T05:34:49,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] response is [] 2018-07-21T05:34:49,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] starting 2018-07-21T05:34:49,686 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,686 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] Got response: 204 No Content 2018-07-21T05:34:49,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] response is [] 2018-07-21T05:34:49,686 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] starting 2018-07-21T05:34:49,686 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,687 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] Got response: 204 No Content 2018-07-21T05:34:49,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] response is [] 2018-07-21T05:34:49,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] starting 2018-07-21T05:34:49,687 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,687 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] Got response: 204 No Content 2018-07-21T05:34:49,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] response 
is [] 2018-07-21T05:34:49,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting 2018-07-21T05:34:49,688 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,688 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content 2018-07-21T05:34:49,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is [] 2018-07-21T05:34:49,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] starting 2018-07-21T05:34:49,689 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,689 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] Got response: 204 No Content 2018-07-21T05:34:49,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] response is [] 2018-07-21T05:34:49,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting 2018-07-21T05:34:49,689 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,689 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content 2018-07-21T05:34:49,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is [] 2018-07-21T05:34:49,689 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] starting 2018-07-21T05:34:49,690 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,690 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] Got response: 204 No Content 2018-07-21T05:34:49,690 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] response is [] 2018-07-21T05:34:49,690 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting 2018-07-21T05:34:49,691 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,691 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content 2018-07-21T05:34:49,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is [] 2018-07-21T05:34:49,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] starting 2018-07-21T05:34:49,691 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,691 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] Got response: 204 No Content 2018-07-21T05:34:49,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] response is [] 2018-07-21T05:34:49,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting 2018-07-21T05:34:49,692 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,692 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content 2018-07-21T05:34:49,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is [] 
2018-07-21T05:34:49,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] starting 2018-07-21T05:34:49,692 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,692 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] Got response: 204 No Content 2018-07-21T05:34:49,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] response is [] 2018-07-21T05:34:49,692 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting 2018-07-21T05:34:49,693 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,693 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content 2018-07-21T05:34:49,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is [] 2018-07-21T05:34:49,693 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting 2018-07-21T05:34:49,694 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,694 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content 2018-07-21T05:34:49,694 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is [] 2018-07-21T05:34:49,694 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting 2018-07-21T05:34:49,694 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,694 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content 2018-07-21T05:34:49,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is [] 2018-07-21T05:34:49,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting 2018-07-21T05:34:49,695 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,695 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content 2018-07-21T05:34:49,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is [] 2018-07-21T05:34:49,695 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting 2018-07-21T05:34:49,696 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,696 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content 2018-07-21T05:34:49,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is [] 2018-07-21T05:34:49,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting 2018-07-21T05:34:49,696 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,696 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content 2018-07-21T05:34:49,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is [] 
2018-07-21T05:34:49,696 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting
2018-07-21T05:34:49,697 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false)
HTTP/1.1 204 No Content
Date: Sat, 21 Jul 2018 12:34:49 GMT
Content-Type: application/json
Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,697 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content
2018-07-21T05:34:49,697 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is []
[log condensed: the same four-record poll cycle -- "starting", "messageReceived ... 204 No Content" (with the Date/Content-Type/Server headers shown above), "Got response: 204 No Content", "Checking segment [...] response is []" -- repeats between 2018-07-21T05:34:49,697 and 2018-07-21T05:34:49,743 for the remaining 48 segments of default.druid_max_size_partition, rotating across threads HttpClient-Netty-Worker-0 through 15, in this order of shard suffixes: 170, 291, 290, 172, 293, 171, 292, 174, 295, 173, 294, 34*, 3, 176, 297, 33*, 4, 175, 296, 36*, 5, 178, 299, 35*, 6, 177, 298, 38*, 37*, 179, 1, 39*, 2, 41*, 40*, 43*, 42*, 280, 161, 282, 160, 281, 163, 284, 162, 283, 45*, 165. Shards marked * belong to the interval 1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z; all others to 1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z. Every request returned HTTP 204 No Content with an empty body.]
2018-07-21T05:34:49,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] starting 2018-07-21T05:34:49,744 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,744 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] Got response: 204 No Content 2018-07-21T05:34:49,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] response is [] 2018-07-21T05:34:49,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting 2018-07-21T05:34:49,744 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,744 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content 2018-07-21T05:34:49,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is [] 2018-07-21T05:34:49,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] starting 2018-07-21T05:34:49,745 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,745 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] Got response: 204 No Content 2018-07-21T05:34:49,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] response is [] 2018-07-21T05:34:49,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] starting 2018-07-21T05:34:49,746 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,746 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] Got response: 204 No Content 2018-07-21T05:34:49,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] response is [] 2018-07-21T05:34:49,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:34:49,746 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,746 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:34:49,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 2018-07-21T05:34:49,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] starting 2018-07-21T05:34:49,747 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,747 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] Got response: 204 No Content 2018-07-21T05:34:49,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] response is [] 2018-07-21T05:34:49,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] starting 2018-07-21T05:34:49,748 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,748 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] Got response: 204 No Content 2018-07-21T05:34:49,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] response is 
[] 2018-07-21T05:34:49,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:34:49,748 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,748 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:34:49,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:34:49,748 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] starting 2018-07-21T05:34:49,749 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,749 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] Got response: 204 No Content 2018-07-21T05:34:49,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] response is [] 2018-07-21T05:34:49,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] starting 2018-07-21T05:34:49,750 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,750 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] Got response: 204 No Content 2018-07-21T05:34:49,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] response is [] 2018-07-21T05:34:49,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:34:49,750 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,750 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:34:49,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:34:49,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] starting 2018-07-21T05:34:49,751 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,751 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] Got response: 204 No Content 2018-07-21T05:34:49,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] response is [] 2018-07-21T05:34:49,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:34:49,752 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,752 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:34:49,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 2018-07-21T05:34:49,752 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] starting 2018-07-21T05:34:49,753 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,753 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] Got response: 204 No Content 2018-07-21T05:34:49,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] response 
is [] 2018-07-21T05:34:49,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] starting 2018-07-21T05:34:49,754 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,754 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] Got response: 204 No Content 2018-07-21T05:34:49,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] response is [] 2018-07-21T05:34:49,754 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:34:49,755 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,755 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:34:49,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 2018-07-21T05:34:49,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:34:49,755 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,755 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:34:49,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 2018-07-21T05:34:49,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:34:49,756 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,756 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:34:49,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:34:49,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:34:49,756 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,756 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:34:49,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 2018-07-21T05:34:49,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:34:49,757 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,757 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:34:49,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:34:49,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] starting 2018-07-21T05:34:49,758 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,758 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] Got response: 204 No Content 2018-07-21T05:34:49,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] response is [] 
2018-07-21T05:34:49,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] starting 2018-07-21T05:34:49,758 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,758 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] Got response: 204 No Content 2018-07-21T05:34:49,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] response is [] 2018-07-21T05:34:49,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] starting 2018-07-21T05:34:49,759 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,759 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] Got response: 204 No Content 2018-07-21T05:34:49,759 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] response is [] 2018-07-21T05:34:49,759 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] starting 2018-07-21T05:34:49,760 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,760 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] Got response: 204 No Content 2018-07-21T05:34:49,760 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] response is [] 2018-07-21T05:34:49,760 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] starting 2018-07-21T05:34:49,760 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,760 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] Got response: 204 No Content 2018-07-21T05:34:49,760 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] response is [] 2018-07-21T05:34:49,760 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] starting 2018-07-21T05:34:49,761 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,761 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] Got response: 204 No Content 2018-07-21T05:34:49,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] response is [] 2018-07-21T05:34:49,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] starting 2018-07-21T05:34:49,762 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,762 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] Got response: 204 No Content 2018-07-21T05:34:49,762 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] response is [] 2018-07-21T05:34:49,762 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] starting 2018-07-21T05:34:49,763 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,763 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] Got response: 204 No Content 2018-07-21T05:34:49,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] response 
is [] 2018-07-21T05:34:49,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] starting 2018-07-21T05:34:49,764 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,764 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] Got response: 204 No Content 2018-07-21T05:34:49,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] response is [] 2018-07-21T05:34:49,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] starting 2018-07-21T05:34:49,764 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,764 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] Got response: 204 No Content 2018-07-21T05:34:49,764 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] response is [] 2018-07-21T05:34:49,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] starting 2018-07-21T05:34:49,765 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,765 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] Got response: 204 No Content 2018-07-21T05:34:49,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] response is [] 2018-07-21T05:34:49,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] starting 2018-07-21T05:34:49,766 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,766 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] Got response: 204 No Content 2018-07-21T05:34:49,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] response is [] 2018-07-21T05:34:49,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting 2018-07-21T05:34:49,767 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,767 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content
2018-07-21T05:34:49,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is []
2018-07-21T05:34:49,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] starting
2018-07-21T05:34:49,768 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,768 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] Got response: 204 No Content
2018-07-21T05:34:49,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] response is []
[... 40 further segment checks elided: the same four-record exchange (starting, messageReceived ... HTTP/1.1 204 No Content, Got response: 204 No Content, Checking segment ... response is []) repeats between 05:34:49,768 and 05:34:49,812 for the unsuffixed base segment and for shards _55, _57-_59, _153, _155-_159, _274-_279, _300-_303, _395-_399, _403-_404, _406, and _414-_424 of default.druid_max_size_partition ...]
2018-07-21T05:34:49,812 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] starting
2018-07-21T05:34:49,813 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,813 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] Got response: 204 No Content
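Every record above follows one pattern: after publishing segments, Hive's DruidStorageHandler polls the Druid coordinator's segment endpoint (GET /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}, visible in the URLs), and a 204 No Content response with an empty body means the coordinator has not registered that segment yet, so the handler polls again. The sketch below is a minimal standalone illustration of that check, assuming only the endpoint behavior shown in this log; it is not Hive's actual implementation (Hive issues these requests through an async Netty client), and the class and method names are illustrative only.

// Standalone sketch of the segment check seen in this log: GET the
// coordinator's segment URL and treat "204 No Content / empty body" as
// "segment not loaded yet". Hypothetical names; not Hive's DruidStorageHandler.
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class CoordinatorSegmentCheck {

    // Returns true once the coordinator reports the segment, i.e. the GET
    // returns a non-empty body instead of 204 No Content.
    static boolean isSegmentLoaded(String coordinator, String dataSource,
                                   String segmentId) throws IOException {
        URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
                + dataSource + "/segments/" + segmentId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try {
            conn.setRequestMethod("GET");
            if (conn.getResponseCode() == HttpURLConnection.HTTP_NO_CONTENT) {
                return false; // 204: coordinator does not know the segment yet
            }
            try (InputStream in = conn.getInputStream()) {
                return in.read() != -1; // non-empty body: segment metadata available
            }
        } finally {
            conn.disconnect();
        }
    }

    public static void main(String[] args) throws Exception {
        // Same coordinator, datasource, and segment id format as the log above.
        boolean loaded = isSegmentLoaded("http://localhost:8081",
                "default.druid_max_size_partition",
                "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
                        + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154");
        System.out.println(loaded ? "loaded" : "not loaded yet");
    }
}

A caller would invoke a check like isSegmentLoaded in a retry loop with a timeout, one request per segment shard, which is what produces the long run of near-identical DEBUG records in this log.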
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] Got response: 204 No Content 2018-07-21T05:34:49,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] response is [] 2018-07-21T05:34:49,813 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] starting 2018-07-21T05:34:49,814 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,814 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] Got response: 204 No Content 2018-07-21T05:34:49,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] response is [] 2018-07-21T05:34:49,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] starting 2018-07-21T05:34:49,814 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,814 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] Got response: 204 No Content 2018-07-21T05:34:49,814 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] response 
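The check being logged here is a simple availability poll: after publishing segments, DruidStorageHandler issues GET /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} against the coordinator and treats a 204/empty body (logged as "response is []") as "segment not yet known", retrying until metadata comes back. The sketch below is a minimal standalone illustration of that polling loop using only java.net.HttpURLConnection; it is not Hive's actual implementation (the log shows Hive's own Netty-based NettyHttpClient), and the class and method names are hypothetical.

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Hypothetical sketch of the segment-availability check seen in the log above.
public class SegmentCheckSketch {

    // Returns true once the coordinator returns metadata for the segment.
    static boolean segmentKnown(String coordinator, String dataSource, String segmentId)
            throws IOException {
        URL url = new URL(coordinator + "/druid/coordinator/v1/datasources/"
                + dataSource + "/segments/" + segmentId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try {
            int status = conn.getResponseCode();
            if (status == 204) {
                return false; // no metadata yet -- this is the "response is []" case
            }
            if (status != 200) {
                throw new IOException("Unexpected HTTP status " + status);
            }
            try (InputStream in = conn.getInputStream()) {
                // Treat an empty JSON body the same as 204: segment not loaded yet.
                byte[] body = in.readAllBytes();
                return new String(body, StandardCharsets.UTF_8).trim().length() > 0;
            }
        } finally {
            conn.disconnect();
        }
    }

    public static void main(String[] args) throws Exception {
        // Values mirroring the log above; adjust for a real cluster.
        String coordinator = "http://localhost:8081";
        String dataSource = "default.druid_max_size_partition";
        String segmentId = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
                + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423";
        // Poll until the coordinator knows the segment, as the handler does
        // for each segment it just published.
        while (!segmentKnown(coordinator, dataSource, segmentId)) {
            Thread.sleep(1000L);
        }
        System.out.println("Segment is known to the coordinator: " + segmentId);
    }
}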
2018-07-21T05:34:49,860 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting
2018-07-21T05:34:49,860 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,860 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content
2018-07-21T05:34:49,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response
is [] 2018-07-21T05:34:49,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting 2018-07-21T05:34:49,861 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,861 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content 2018-07-21T05:34:49,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is [] 2018-07-21T05:34:49,861 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] starting 2018-07-21T05:34:49,862 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,862 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] Got response: 204 No Content 2018-07-21T05:34:49,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] response is [] 2018-07-21T05:34:49,862 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] starting 2018-07-21T05:34:49,863 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,863 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] Got response: 204 No Content 2018-07-21T05:34:49,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] response is [] 2018-07-21T05:34:49,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] starting 2018-07-21T05:34:49,863 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,863 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] Got response: 204 No Content 2018-07-21T05:34:49,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] response is [] 2018-07-21T05:34:49,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] starting 2018-07-21T05:34:49,864 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,864 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] Got response: 204 No Content 2018-07-21T05:34:49,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] response is [] 2018-07-21T05:34:49,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] starting 2018-07-21T05:34:49,865 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,865 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] Got response: 204 No Content 2018-07-21T05:34:49,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] response is [] 2018-07-21T05:34:49,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] starting 2018-07-21T05:34:49,866 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,866 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] Got response: 204 No Content 2018-07-21T05:34:49,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] response 
is [] 2018-07-21T05:34:49,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:34:49,866 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,866 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:34:49,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is [] 2018-07-21T05:34:49,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] starting 2018-07-21T05:34:49,867 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,867 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] Got response: 204 No Content 2018-07-21T05:34:49,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] response is [] 2018-07-21T05:34:49,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] starting 2018-07-21T05:34:49,868 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,868 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] Got response: 204 No Content 2018-07-21T05:34:49,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] response is [] 2018-07-21T05:34:49,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] starting 2018-07-21T05:34:49,868 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,868 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] Got response: 204 No Content 2018-07-21T05:34:49,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] response is [] 2018-07-21T05:34:49,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:34:49,869 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,869 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:34:49,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:34:49,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] starting 2018-07-21T05:34:49,870 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,870 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] Got response: 204 No Content 2018-07-21T05:34:49,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] response is [] 2018-07-21T05:34:49,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] starting 2018-07-21T05:34:49,872 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,872 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] Got response: 204 No Content 2018-07-21T05:34:49,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] 
response is [] 2018-07-21T05:34:49,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] starting 2018-07-21T05:34:49,872 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,872 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] Got response: 204 No Content 2018-07-21T05:34:49,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] response is [] 2018-07-21T05:34:49,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:34:49,873 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,873 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:34:49,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:34:49,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] starting 2018-07-21T05:34:49,874 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,874 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] Got response: 204 No Content 2018-07-21T05:34:49,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] response is [] 2018-07-21T05:34:49,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] starting 2018-07-21T05:34:49,874 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,874 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] Got response: 204 No Content 2018-07-21T05:34:49,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] response is [] 2018-07-21T05:34:49,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] starting 2018-07-21T05:34:49,875 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,875 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] Got response: 204 No Content 2018-07-21T05:34:49,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] response is [] 2018-07-21T05:34:49,875 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:34:49,875 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,875 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:34:49,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:34:49,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] starting 2018-07-21T05:34:49,876 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,876 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] Got response: 204 No Content 2018-07-21T05:34:49,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] response 
is [] 2018-07-21T05:34:49,876 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] starting 2018-07-21T05:34:49,885 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,885 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] Got response: 204 No Content 2018-07-21T05:34:49,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] response is [] 2018-07-21T05:34:49,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] starting 2018-07-21T05:34:49,886 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,886 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] Got response: 204 No Content 2018-07-21T05:34:49,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] response is [] 2018-07-21T05:34:49,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:34:49,887 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,887 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:34:49,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is [] 2018-07-21T05:34:49,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] starting 2018-07-21T05:34:49,888 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,888 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] Got response: 204 No Content 2018-07-21T05:34:49,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] response is [] 2018-07-21T05:34:49,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] starting 2018-07-21T05:34:49,889 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,889 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] Got response: 204 No Content 2018-07-21T05:34:49,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] response is [] 2018-07-21T05:34:49,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] starting 2018-07-21T05:34:49,890 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,890 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] Got response: 204 No Content 2018-07-21T05:34:49,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] response is [] 2018-07-21T05:34:49,890 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:34:49,891 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,891 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:34:49,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response 
is [] 2018-07-21T05:34:49,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] starting 2018-07-21T05:34:49,891 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,891 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] Got response: 204 No Content 2018-07-21T05:34:49,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] response is [] 2018-07-21T05:34:49,891 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] starting 2018-07-21T05:34:49,892 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,892 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] Got response: 204 No Content 2018-07-21T05:34:49,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] response is [] 2018-07-21T05:34:49,892 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] starting 2018-07-21T05:34:49,893 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,893 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] Got response: 204 No Content 2018-07-21T05:34:49,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] response is [] 2018-07-21T05:34:49,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] starting 2018-07-21T05:34:49,893 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,893 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] Got response: 204 No Content 2018-07-21T05:34:49,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] response is [] 2018-07-21T05:34:49,893 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] starting 2018-07-21T05:34:49,894 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,894 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] Got response: 204 No Content 2018-07-21T05:34:49,894 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] response is [] 2018-07-21T05:34:49,894 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] starting 2018-07-21T05:34:49,895 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,895 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] Got response: 204 No Content 2018-07-21T05:34:49,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] response is [] 2018-07-21T05:34:49,895 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting 2018-07-21T05:34:49,896 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,896 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content 2018-07-21T05:34:49,896 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] 
response is [] 2018-07-21T05:34:49,896 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] starting 2018-07-21T05:34:49,896 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,896 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] Got response: 204 No Content 2018-07-21T05:34:49,897 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] response is [] 2018-07-21T05:34:49,897 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] starting 2018-07-21T05:34:49,900 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,900 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] Got response: 204 No Content 2018-07-21T05:34:49,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] response is [] 2018-07-21T05:34:49,900 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] starting 2018-07-21T05:34:49,901 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,901 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] Got response: 204 No Content 2018-07-21T05:34:49,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] response is [] 2018-07-21T05:34:49,901 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] starting 2018-07-21T05:34:49,902 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,902 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] Got response: 204 No Content 2018-07-21T05:34:49,902 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] response is [] 2018-07-21T05:34:49,902 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] starting 2018-07-21T05:34:49,902 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,902 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] Got response: 204 No Content 2018-07-21T05:34:49,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] response is [] 2018-07-21T05:34:49,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] starting 2018-07-21T05:34:49,903 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,903 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] Got response: 204 No Content 2018-07-21T05:34:49,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] response is [] 2018-07-21T05:34:49,903 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] starting 2018-07-21T05:34:49,904 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,904 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] Got response: 204 No Content 2018-07-21T05:34:49,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] response 
is [] 2018-07-21T05:34:49,904 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] starting 2018-07-21T05:34:49,905 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,905 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] Got response: 204 No Content 2018-07-21T05:34:49,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] response is [] 2018-07-21T05:34:49,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting 2018-07-21T05:34:49,905 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,905 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content 2018-07-21T05:34:49,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is [] 2018-07-21T05:34:49,905 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting 2018-07-21T05:34:49,906 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,906 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content 2018-07-21T05:34:49,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is [] 2018-07-21T05:34:49,906 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] starting 2018-07-21T05:34:49,907 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,907 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] Got response: 204 No Content 2018-07-21T05:34:49,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] response is [] 2018-07-21T05:34:49,907 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting 2018-07-21T05:34:49,908 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,908 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content 2018-07-21T05:34:49,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is [] 2018-07-21T05:34:49,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting 2018-07-21T05:34:49,908 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,908 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content 2018-07-21T05:34:49,908 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is [] 2018-07-21T05:34:49,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting 2018-07-21T05:34:49,909 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,909 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content 2018-07-21T05:34:49,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response 
is [] 2018-07-21T05:34:49,909 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:34:49,910 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,910 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:34:49,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is [] 2018-07-21T05:34:49,910 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting 2018-07-21T05:34:49,911 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,911 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content 2018-07-21T05:34:49,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is [] 2018-07-21T05:34:49,911 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting 2018-07-21T05:34:49,912 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,912 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content 2018-07-21T05:34:49,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is [] 2018-07-21T05:34:49,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:34:49,912 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,912 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:34:49,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is [] 2018-07-21T05:34:49,912 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] starting 2018-07-21T05:34:49,913 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,913 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] Got response: 204 No Content 2018-07-21T05:34:49,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] response is [] 2018-07-21T05:34:49,913 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] starting 2018-07-21T05:34:49,914 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,914 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] Got response: 204 No Content 2018-07-21T05:34:49,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] response is [] 2018-07-21T05:34:49,914 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] starting 2018-07-21T05:34:49,915 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,915 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] Got response: 204 No Content 2018-07-21T05:34:49,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] 
response is [] 2018-07-21T05:34:49,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] starting 2018-07-21T05:34:49,915 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,915 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] Got response: 204 No Content 2018-07-21T05:34:49,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] response is [] 2018-07-21T05:34:49,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] starting 2018-07-21T05:34:49,916 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,916 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] Got response: 204 No Content 2018-07-21T05:34:49,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] response is [] 2018-07-21T05:34:49,916 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] starting 2018-07-21T05:34:49,917 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,917 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] Got response: 204 No Content 2018-07-21T05:34:49,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] response is [] 2018-07-21T05:34:49,917 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] starting 2018-07-21T05:34:49,921 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,921 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] Got response: 204 No Content 2018-07-21T05:34:49,921 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] response is [] 2018-07-21T05:34:49,921 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] starting 2018-07-21T05:34:49,921 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,922 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] Got response: 204 No Content 2018-07-21T05:34:49,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] response is [] 2018-07-21T05:34:49,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] starting 2018-07-21T05:34:49,922 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,922 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] Got response: 204 No Content 2018-07-21T05:34:49,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] response is [] 2018-07-21T05:34:49,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] starting 2018-07-21T05:34:49,923 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,923 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] Got response: 204 No Content 2018-07-21T05:34:49,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] response 
is [] 2018-07-21T05:34:49,923 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] starting 2018-07-21T05:34:49,924 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,924 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] Got response: 204 No Content 2018-07-21T05:34:49,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] response is [] 2018-07-21T05:34:49,924 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] starting 2018-07-21T05:34:49,925 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,925 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] Got response: 204 No Content 2018-07-21T05:34:49,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] response is [] 2018-07-21T05:34:49,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] starting 2018-07-21T05:34:49,925 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,925 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] Got response: 204 No Content 2018-07-21T05:34:49,925 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] response is [] 2018-07-21T05:34:49,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] starting 2018-07-21T05:34:49,926 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,926 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] Got response: 204 No Content 2018-07-21T05:34:49,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] response is [] 2018-07-21T05:34:49,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] starting 2018-07-21T05:34:49,927 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,927 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] Got response: 204 No Content 2018-07-21T05:34:49,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] response is [] 2018-07-21T05:34:49,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] starting 2018-07-21T05:34:49,927 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,927 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] Got response: 204 No Content 2018-07-21T05:34:49,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] response is [] 2018-07-21T05:34:49,927 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] starting 2018-07-21T05:34:49,928 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,928 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] Got response: 204 No Content 2018-07-21T05:34:49,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] response 
is []
2018-07-21T05:34:49,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] starting
2018-07-21T05:34:49,928 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,928 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] Got response: 204 No Content
2018-07-21T05:34:49,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] response is []
2018-07-21T05:34:49,928 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] starting
2018-07-21T05:34:49,929 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,929 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] Got response: 204 No Content
2018-07-21T05:34:49,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] response is []
2018-07-21T05:34:49,929 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] starting
2018-07-21T05:34:49,930 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,930 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] Got response: 204 No Content
2018-07-21T05:34:49,930 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] response is []
2018-07-21T05:34:49,930 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] starting
2018-07-21T05:34:49,930 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,930 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] Got response: 204 No Content
2018-07-21T05:34:49,930 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] response is []
2018-07-21T05:34:49,930 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] starting
2018-07-21T05:34:49,931 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,931 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] Got response: 204 No Content
2018-07-21T05:34:49,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] response is []
2018-07-21T05:34:49,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] starting
2018-07-21T05:34:49,931 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,931 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] Got response: 204 No Content
2018-07-21T05:34:49,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] response is []
2018-07-21T05:34:49,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] starting
2018-07-21T05:34:49,932 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,932 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] Got response: 204 No Content
2018-07-21T05:34:49,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] response is []
2018-07-21T05:34:49,932 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] starting
2018-07-21T05:34:49,933 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,933 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] Got response: 204 No Content
2018-07-21T05:34:49,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] response is []
2018-07-21T05:34:49,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] starting
2018-07-21T05:34:49,933 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,933 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] Got response: 204 No Content
2018-07-21T05:34:49,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] response is []
2018-07-21T05:34:49,933 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] starting
2018-07-21T05:34:49,934 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,934 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] Got response: 204 No Content
2018-07-21T05:34:49,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] response is []
2018-07-21T05:34:49,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] starting
2018-07-21T05:34:49,934 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,934 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] Got response: 204 No Content
2018-07-21T05:34:49,934 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] response is []
2018-07-21T05:34:49,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] starting
2018-07-21T05:34:49,935 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,935 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] Got response: 204 No Content
2018-07-21T05:34:49,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] response is []
2018-07-21T05:34:49,935 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting
2018-07-21T05:34:49,936 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,936 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content
2018-07-21T05:34:49,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is []
2018-07-21T05:34:49,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting
2018-07-21T05:34:49,936 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,936 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content
2018-07-21T05:34:49,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is []
2018-07-21T05:34:49,936 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] starting
2018-07-21T05:34:49,937 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,937 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] Got response: 204 No Content
2018-07-21T05:34:49,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] response is []
2018-07-21T05:34:49,937 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting
2018-07-21T05:34:49,938 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,938 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content
2018-07-21T05:34:49,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is []
2018-07-21T05:34:49,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting
2018-07-21T05:34:49,938 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,938 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content
2018-07-21T05:34:49,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is []
2018-07-21T05:34:49,938 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting
2018-07-21T05:34:49,939 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,939 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content
2018-07-21T05:34:49,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is []
2018-07-21T05:34:49,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting
2018-07-21T05:34:49,940 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,940 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content
2018-07-21T05:34:49,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is []
2018-07-21T05:34:49,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting
2018-07-21T05:34:49,940 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,940 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content
2018-07-21T05:34:49,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is []
2018-07-21T05:34:49,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting
2018-07-21T05:34:49,941 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,941 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content
2018-07-21T05:34:49,941 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is []
2018-07-21T05:34:49,941 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting
2018-07-21T05:34:49,941 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,941 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content
2018-07-21T05:34:49,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is []
2018-07-21T05:34:49,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting
2018-07-21T05:34:49,942 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,942 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content
2018-07-21T05:34:49,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is []
2018-07-21T05:34:49,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] starting
2018-07-21T05:34:49,943 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,943 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] Got response: 204 No Content
2018-07-21T05:34:49,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] response is []
2018-07-21T05:34:49,943 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] starting
2018-07-21T05:34:49,943 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,944 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] Got response: 204 No Content
2018-07-21T05:34:49,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] response is []
2018-07-21T05:34:49,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] starting
2018-07-21T05:34:49,944 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,944 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] Got response: 204 No Content
2018-07-21T05:34:49,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] response is []
2018-07-21T05:34:49,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] starting
2018-07-21T05:34:49,945 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,945 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] Got response: 204 No Content
2018-07-21T05:34:49,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] response is []
2018-07-21T05:34:49,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] starting
2018-07-21T05:34:49,945 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,945 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] Got response: 204 No Content
2018-07-21T05:34:49,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] response is []
2018-07-21T05:34:49,946 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] starting
2018-07-21T05:34:49,948 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,948 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] Got response: 204 No Content
2018-07-21T05:34:49,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] response is []
2018-07-21T05:34:49,948 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] starting
2018-07-21T05:34:49,949 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,949 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] Got response: 204 No Content
2018-07-21T05:34:49,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] response is []
2018-07-21T05:34:49,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] starting
2018-07-21T05:34:49,949 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,949 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] Got response: 204 No Content
2018-07-21T05:34:49,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] response is []
2018-07-21T05:34:49,949 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] starting
2018-07-21T05:34:49,950 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,950 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] Got response: 204 No Content
2018-07-21T05:34:49,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] response is []
2018-07-21T05:34:49,950 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] starting
2018-07-21T05:34:49,951 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,951 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] Got response: 204 No Content
2018-07-21T05:34:49,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] response is []
2018-07-21T05:34:49,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] starting
2018-07-21T05:34:49,951 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,951 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] Got response: 204 No Content
2018-07-21T05:34:49,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] response is []
2018-07-21T05:34:49,951 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] starting
2018-07-21T05:34:49,952 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,952 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] Got response: 204 No Content
2018-07-21T05:34:49,952 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] response is []
2018-07-21T05:34:49,952 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] starting
2018-07-21T05:34:49,953 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,953 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] Got response: 204 No Content
2018-07-21T05:34:49,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] response is []
2018-07-21T05:34:49,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] starting
2018-07-21T05:34:49,953 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,953 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] Got response: 204 No Content
2018-07-21T05:34:49,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] response is []
2018-07-21T05:34:49,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] starting
2018-07-21T05:34:49,954 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,954 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] Got response: 204 No Content
2018-07-21T05:34:49,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] response is []
2018-07-21T05:34:49,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] starting
2018-07-21T05:34:49,955 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,955 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] Got response: 204 No Content
2018-07-21T05:34:49,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] response is []
2018-07-21T05:34:49,955 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] starting
2018-07-21T05:34:49,956 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,956 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] Got response: 204 No Content
2018-07-21T05:34:49,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] response is []
2018-07-21T05:34:49,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] starting
2018-07-21T05:34:49,956 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,956 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] Got response: 204 No Content
2018-07-21T05:34:49,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] response is []
2018-07-21T05:34:49,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] starting
2018-07-21T05:34:49,957 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,957 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] Got response: 204 No Content
2018-07-21T05:34:49,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] response is []
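The exchanges above all follow one four-step pattern: DruidStorageHandler issues GET /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} against the coordinator for each segment it published, and a 204 No Content with an empty body (logged as "response is []") means the coordinator has not registered the segment yet, so the handler polls again until metadata comes back. A minimal sketch of that poll loop, assuming Java 11's java.net.http; SegmentLoadCheck and isSegmentLoaded are illustrative names, not Hive's actual implementation:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SegmentLoadCheck {

        // Ask the coordinator whether it knows the segment yet. While loading is
        // still in progress the endpoint returns 204 No Content with an empty
        // body, which is what the "response is []" lines above reflect.
        static boolean isSegmentLoaded(HttpClient client, String coordinatorHostPort,
                                       String dataSource, String segmentId) throws Exception {
            URI uri = URI.create(String.format(
                    "http://%s/druid/coordinator/v1/datasources/%s/segments/%s",
                    coordinatorHostPort, dataSource, segmentId));
            HttpRequest request = HttpRequest.newBuilder(uri).GET().build();
            HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());
            return response.statusCode() == 200 && !response.body().isEmpty();
        }

        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            String segmentId = "default.druid_max_size_partition_"
                    + "1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_"
                    + "2018-07-21T05:31:59.547-07:00_457";
            // Poll until the coordinator reports the segment, as the log above does.
            while (!isSegmentLoaded(client, "localhost:8081",
                    "default.druid_max_size_partition", segmentId)) {
                Thread.sleep(1000L);
            }
            System.out.println("Segment visible on the coordinator: " + segmentId);
        }
    }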
2018-07-21T05:34:49,957 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] starting
2018-07-21T05:34:49,958 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,958 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] Got response: 204 No Content
2018-07-21T05:34:49,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] response is []
2018-07-21T05:34:49,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] starting
2018-07-21T05:34:49,959 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,959 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] Got response: 204 No Content
2018-07-21T05:34:49,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] response is []
2018-07-21T05:34:49,959 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] starting
2018-07-21T05:34:49,959 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,960 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] Got response: 204 No Content
2018-07-21T05:34:49,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] response is []
2018-07-21T05:34:49,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] starting
2018-07-21T05:34:49,960 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,960 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] Got response: 204 No Content
2018-07-21T05:34:49,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] response is []
2018-07-21T05:34:49,960 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] starting
2018-07-21T05:34:49,961 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,961 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] Got response: 204 No Content
2018-07-21T05:34:49,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] response is []
2018-07-21T05:34:49,961 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] starting
2018-07-21T05:34:49,962 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,962 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] Got response: 204 No Content
2018-07-21T05:34:49,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] response is []
2018-07-21T05:34:49,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] starting
2018-07-21T05:34:49,962 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:49,962 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] Got response: 204 No Content
2018-07-21T05:34:49,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] response
is [] 2018-07-21T05:34:49,962 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:34:49,963 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,963 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:34:49,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is [] 2018-07-21T05:34:49,963 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] starting 2018-07-21T05:34:49,964 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,964 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] Got response: 204 No Content 2018-07-21T05:34:49,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] response is [] 2018-07-21T05:34:49,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:34:49,964 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,964 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:34:49,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:34:49,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] starting 2018-07-21T05:34:49,965 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,965 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] Got response: 204 No Content 2018-07-21T05:34:49,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] response is [] 2018-07-21T05:34:49,965 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] starting 2018-07-21T05:34:49,966 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,966 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] Got response: 204 No Content 2018-07-21T05:34:49,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] response is [] 2018-07-21T05:34:49,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:34:49,966 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,966 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:34:49,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:34:49,966 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:34:49,967 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,967 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:34:49,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response 
is [] 2018-07-21T05:34:49,967 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting 2018-07-21T05:34:49,968 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,968 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content 2018-07-21T05:34:49,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is [] 2018-07-21T05:34:49,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:34:49,968 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,968 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:34:49,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:34:49,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:34:49,969 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,969 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:34:49,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:34:49,969 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting 2018-07-21T05:34:49,970 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,970 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content 2018-07-21T05:34:49,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is [] 2018-07-21T05:34:49,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting 2018-07-21T05:34:49,970 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,970 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content 2018-07-21T05:34:49,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is [] 2018-07-21T05:34:49,970 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting 2018-07-21T05:34:49,971 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,971 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content 2018-07-21T05:34:49,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is [] 2018-07-21T05:34:49,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] starting 2018-07-21T05:34:49,972 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,972 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] Got response: 204 No Content 2018-07-21T05:34:49,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] 
response is [] 2018-07-21T05:34:49,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] starting 2018-07-21T05:34:49,972 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,972 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] Got response: 204 No Content 2018-07-21T05:34:49,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] response is [] 2018-07-21T05:34:49,972 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] starting 2018-07-21T05:34:49,973 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,973 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] Got response: 204 No Content 2018-07-21T05:34:49,973 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] response is [] 2018-07-21T05:34:49,973 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] starting 2018-07-21T05:34:49,974 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,974 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] Got response: 204 No Content 2018-07-21T05:34:49,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] response is [] 2018-07-21T05:34:49,974 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] starting 2018-07-21T05:34:49,976 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,976 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] Got response: 204 No Content 2018-07-21T05:34:49,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] response is [] 2018-07-21T05:34:49,976 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] starting 2018-07-21T05:34:49,976 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,976 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] Got response: 204 No Content 2018-07-21T05:34:49,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] response is [] 2018-07-21T05:34:49,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] starting 2018-07-21T05:34:49,977 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,977 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] Got response: 204 No Content 2018-07-21T05:34:49,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] response is [] 2018-07-21T05:34:49,977 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] starting 2018-07-21T05:34:49,978 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,978 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] Got response: 204 No Content 2018-07-21T05:34:49,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] response 
is [] 2018-07-21T05:34:49,978 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] starting 2018-07-21T05:34:49,978 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,978 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] Got response: 204 No Content 2018-07-21T05:34:49,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] response is [] 2018-07-21T05:34:49,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] starting 2018-07-21T05:34:49,979 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,979 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] Got response: 204 No Content 2018-07-21T05:34:49,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] response is [] 2018-07-21T05:34:49,979 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] starting 2018-07-21T05:34:49,980 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,980 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] Got response: 204 No Content 2018-07-21T05:34:49,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] response is [] 2018-07-21T05:34:49,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:34:49,980 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,980 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:34:49,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 2018-07-21T05:34:49,980 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:34:49,981 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,981 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:34:49,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 2018-07-21T05:34:49,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:34:49,981 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,981 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:34:49,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:34:49,981 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:34:49,982 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,982 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:34:49,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 
2018-07-21T05:34:49,982 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:34:49,983 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,983 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:34:49,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 2018-07-21T05:34:49,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting 2018-07-21T05:34:49,983 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,983 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content 2018-07-21T05:34:49,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is [] 2018-07-21T05:34:49,983 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting 2018-07-21T05:34:49,984 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,984 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content 2018-07-21T05:34:49,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is [] 2018-07-21T05:34:49,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting 2018-07-21T05:34:49,985 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,985 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content 2018-07-21T05:34:49,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is [] 2018-07-21T05:34:49,985 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting 2018-07-21T05:34:49,986 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,986 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content 2018-07-21T05:34:49,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is [] 2018-07-21T05:34:49,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting 2018-07-21T05:34:49,986 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,986 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content 2018-07-21T05:34:49,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is [] 2018-07-21T05:34:49,986 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting 2018-07-21T05:34:49,987 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,987 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content 2018-07-21T05:34:49,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is [] 
2018-07-21T05:34:49,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting 2018-07-21T05:34:49,987 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,987 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content 2018-07-21T05:34:49,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is [] 2018-07-21T05:34:49,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting 2018-07-21T05:34:49,988 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,988 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content 2018-07-21T05:34:49,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is [] 2018-07-21T05:34:49,988 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting 2018-07-21T05:34:49,988 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,989 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content 2018-07-21T05:34:49,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is [] 2018-07-21T05:34:49,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting 2018-07-21T05:34:49,989 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,989 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content 2018-07-21T05:34:49,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is [] 2018-07-21T05:34:49,989 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting 2018-07-21T05:34:49,990 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,990 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content 2018-07-21T05:34:49,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is [] 2018-07-21T05:34:49,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting 2018-07-21T05:34:49,990 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,990 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:34:49,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 2018-07-21T05:34:49,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:34:49,991 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,991 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:34:49,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 
2018-07-21T05:34:49,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:34:49,991 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,991 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:34:49,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:34:49,991 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] starting 2018-07-21T05:34:49,992 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,992 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] Got response: 204 No Content 2018-07-21T05:34:49,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] response is [] 2018-07-21T05:34:49,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] starting 2018-07-21T05:34:49,992 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,992 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] Got response: 204 No Content 2018-07-21T05:34:49,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] response is [] 2018-07-21T05:34:49,992 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] starting 2018-07-21T05:34:49,993 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,993 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] Got response: 204 No Content 2018-07-21T05:34:49,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] response is [] 2018-07-21T05:34:49,993 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] starting 2018-07-21T05:34:49,993 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,994 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] Got response: 204 No Content 2018-07-21T05:34:49,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] response is [] 2018-07-21T05:34:49,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] starting 2018-07-21T05:34:49,994 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,994 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] Got response: 204 No Content 2018-07-21T05:34:49,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] response is [] 2018-07-21T05:34:49,994 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] starting 2018-07-21T05:34:49,995 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,995 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] Got response: 204 No Content 2018-07-21T05:34:49,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] response 
is [] 2018-07-21T05:34:49,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] starting 2018-07-21T05:34:49,995 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,995 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] Got response: 204 No Content 2018-07-21T05:34:49,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] response is [] 2018-07-21T05:34:49,995 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] starting 2018-07-21T05:34:49,996 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,996 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] Got response: 204 No Content 2018-07-21T05:34:49,996 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] response is [] 2018-07-21T05:34:49,996 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] starting 2018-07-21T05:34:49,996 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,996 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] Got response: 204 No Content 2018-07-21T05:34:49,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] response is [] 2018-07-21T05:34:49,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] starting 2018-07-21T05:34:49,997 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,997 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] Got response: 204 No Content 2018-07-21T05:34:49,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] response is [] 2018-07-21T05:34:49,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:34:49,998 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,998 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:34:49,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 2018-07-21T05:34:49,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting 2018-07-21T05:34:49,998 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,998 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content 2018-07-21T05:34:49,998 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is [] 2018-07-21T05:34:49,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:34:49,999 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:49,999 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:34:49,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 
2018-07-21T05:34:49,999 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:34:50,000 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:49 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,000 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:34:50,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:34:50,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:34:50,000 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,000 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:34:50,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:34:50,000 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:34:50,001 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,001 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:34:50,001 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 2018-07-21T05:34:50,001 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting 2018-07-21T05:34:50,002 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,002 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content 2018-07-21T05:34:50,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is [] 2018-07-21T05:34:50,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:34:50,002 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,002 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:34:50,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 2018-07-21T05:34:50,002 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:34:50,003 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,003 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:34:50,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:34:50,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:34:50,003 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,003 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:34:50,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 
2018-07-21T05:34:50,003 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:34:50,004 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,004 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:34:50,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:34:50,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:34:50,004 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,004 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:34:50,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:34:50,004 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting 2018-07-21T05:34:50,005 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,005 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content 2018-07-21T05:34:50,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is [] 2018-07-21T05:34:50,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:34:50,005 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,005 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:34:50,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 2018-07-21T05:34:50,005 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:34:50,006 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,006 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:34:50,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 2018-07-21T05:34:50,006 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:34:50,007 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,007 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:34:50,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 2018-07-21T05:34:50,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:34:50,007 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:34:50,007 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:34:50,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 
2018-07-21T05:34:50,007 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting
2018-07-21T05:34:50,009 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,009 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content
2018-07-21T05:34:50,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is []
2018-07-21T05:34:50,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting
2018-07-21T05:34:50,009 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,009 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content
2018-07-21T05:34:50,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is []
2018-07-21T05:34:50,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting
2018-07-21T05:34:50,010 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,010 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content
2018-07-21T05:34:50,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is []
2018-07-21T05:34:50,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting
2018-07-21T05:34:50,010 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,010 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content
2018-07-21T05:34:50,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is []
2018-07-21T05:34:50,010 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting
2018-07-21T05:34:50,011 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,011 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content
2018-07-21T05:34:50,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is []
2018-07-21T05:34:50,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting
2018-07-21T05:34:50,011 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,011 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content
2018-07-21T05:34:50,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is []
2018-07-21T05:34:50,011 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting
2018-07-21T05:34:50,012 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,012 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content
2018-07-21T05:34:50,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is []
2018-07-21T05:34:50,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting
2018-07-21T05:34:50,012 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,012 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content
2018-07-21T05:34:50,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is []
2018-07-21T05:34:50,012 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting
2018-07-21T05:34:50,013 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,013 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content
2018-07-21T05:34:50,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is []
2018-07-21T05:34:50,013 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting
2018-07-21T05:34:50,014 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,014 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content
2018-07-21T05:34:50,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is []
2018-07-21T05:34:50,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting
2018-07-21T05:34:50,014 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,014 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content
2018-07-21T05:34:50,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is []
2018-07-21T05:34:50,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting
2018-07-21T05:34:50,015 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,015 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content
2018-07-21T05:34:50,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is []
2018-07-21T05:34:50,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting
2018-07-21T05:34:50,015 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,015 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content
2018-07-21T05:34:50,015 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is []
2018-07-21T05:34:50,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting
2018-07-21T05:34:50,016 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,016 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content
2018-07-21T05:34:50,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is []
2018-07-21T05:34:50,016 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting
2018-07-21T05:34:50,016 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,016 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content
2018-07-21T05:34:50,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is []
2018-07-21T05:34:50,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting
2018-07-21T05:34:50,017 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,017 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content
2018-07-21T05:34:50,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is []
2018-07-21T05:34:50,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting
2018-07-21T05:34:50,017 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,017 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content
2018-07-21T05:34:50,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is []
2018-07-21T05:34:50,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting
2018-07-21T05:34:50,018 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,018 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content
2018-07-21T05:34:50,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is []
2018-07-21T05:34:50,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting
2018-07-21T05:34:50,018 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,018 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content
2018-07-21T05:34:50,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is []
2018-07-21T05:34:50,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting
2018-07-21T05:34:50,019 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,019 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content
2018-07-21T05:34:50,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is []
2018-07-21T05:34:50,019 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting
2018-07-21T05:34:50,020 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,020 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content
2018-07-21T05:34:50,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is []
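The exchanges above are Hive's DruidStorageHandler polling the Druid coordinator's segment-metadata endpoint, GET /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}, once per freshly pushed segment. A 204 No Content answer (an empty body, hence "response is []") means the coordinator does not yet report that segment as available, so the handler keeps polling. Below is a minimal sketch of one such check, assuming only plain java.net.HttpURLConnection; the class and method names are illustrative, not Hive's actual implementation.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public final class CoordinatorSegmentCheck {
  // Returns true once the coordinator answers 200 with segment metadata;
  // false while it still answers 204 No Content (segment not loaded yet).
  static boolean isSegmentLoaded(String coordinatorBase, String dataSource, String segmentId)
      throws IOException {
    URL url = new URL(coordinatorBase + "/druid/coordinator/v1/datasources/"
        + dataSource + "/segments/" + segmentId);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    try {
      return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
    } finally {
      conn.disconnect();
    }
  }

  public static void main(String[] args) throws IOException {
    // Example values copied from the log entries above.
    System.out.println(isSegmentLoaded(
        "http://localhost:8081",
        "default.druid_max_size_partition",
        "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44"));
  }
}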
2018-07-21T05:34:50,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting
2018-07-21T05:34:50,020 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,020 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content
2018-07-21T05:34:50,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is []
2018-07-21T05:34:50,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting
2018-07-21T05:34:50,021 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:34:50 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:34:50,021 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content
2018-07-21T05:34:50,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is []
2018-07-21T05:35:19,239 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:35:19,301 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:35:20,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting
2018-07-21T05:35:20,023 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,023 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content
2018-07-21T05:35:20,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is []
2018-07-21T05:35:20,023 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting
2018-07-21T05:35:20,024 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,024 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content
2018-07-21T05:35:20,024 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is []
2018-07-21T05:35:20,024 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting
2018-07-21T05:35:20,025 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,025 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content
2018-07-21T05:35:20,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is []
2018-07-21T05:35:20,025 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting
2018-07-21T05:35:20,026 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,026 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content
2018-07-21T05:35:20,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is []
2018-07-21T05:35:20,026 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting
2018-07-21T05:35:20,027 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,027 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content
2018-07-21T05:35:20,027 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is []
2018-07-21T05:35:20,027 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting
2018-07-21T05:35:20,027 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,028 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content
2018-07-21T05:35:20,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is []
2018-07-21T05:35:20,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting
2018-07-21T05:35:20,028 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,028 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content
2018-07-21T05:35:20,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is []
2018-07-21T05:35:20,028 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting
2018-07-21T05:35:20,029 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,029 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content
2018-07-21T05:35:20,029 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is []
2018-07-21T05:35:20,029 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting
2018-07-21T05:35:20,030 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,030 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content
2018-07-21T05:35:20,030 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is []
2018-07-21T05:35:20,030 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] starting
2018-07-21T05:35:20,030 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,031 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] Got response: 204 No Content
2018-07-21T05:35:20,031 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_260] response is []
2018-07-21T05:35:20,031 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] starting
2018-07-21T05:35:20,032 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,032 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] Got response: 204 No Content
2018-07-21T05:35:20,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_381] response is []
2018-07-21T05:35:20,032 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] starting
2018-07-21T05:35:20,033 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,033 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content
2018-07-21T05:35:20,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is []
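The identifiers being checked follow Druid's segment-ID convention, dataSource_intervalStart_intervalEnd_version with an optional trailing _partitionNumber for shards beyond the first (note the one entry above with no numeric suffix). The two poll rounds are also roughly 30 seconds apart (05:34:50 and then 05:35:20), consistent with a fixed-interval retry while segments load. A small illustrative parser for the ID layout follows; the field handling is an assumption of mine, not a Druid API, and it works from the right because the dataSource itself contains underscores.

import java.util.Arrays;

public final class SegmentIdFields {
  public static void main(String[] args) {
    // Segment ID copied from the log above.
    String id = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44";
    String[] t = id.split("_");
    int i = t.length - 1;
    // Shard 0 omits the partition number entirely.
    String partition = t[i].matches("\\d+") ? t[i--] : "0";
    String version = t[i--];
    String intervalEnd = t[i--];
    String intervalStart = t[i--];
    String dataSource = String.join("_", Arrays.copyOfRange(t, 0, i + 1));
    System.out.printf("%s | %s/%s | version=%s | shard=%s%n",
        dataSource, intervalStart, intervalEnd, version, partition);
  }
}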
2018-07-21T05:35:20,033 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] starting
2018-07-21T05:35:20,034 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,034 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] Got response: 204 No Content
2018-07-21T05:35:20,034 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_380] response is []
2018-07-21T05:35:20,034 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting
2018-07-21T05:35:20,035 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,035 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content
2018-07-21T05:35:20,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is []
2018-07-21T05:35:20,035 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting
2018-07-21T05:35:20,036 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,036 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content
2018-07-21T05:35:20,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response is []
2018-07-21T05:35:20,036 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] starting
2018-07-21T05:35:20,036 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,036 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] Got response: 204 No Content
2018-07-21T05:35:20,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_262] response is []
2018-07-21T05:35:20,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] starting
2018-07-21T05:35:20,037 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,037 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] Got response: 204 No Content
2018-07-21T05:35:20,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383] response is []
2018-07-21T05:35:20,037 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting
2018-07-21T05:35:20,038 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,038 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content
2018-07-21T05:35:20,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is []
2018-07-21T05:35:20,038 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting
2018-07-21T05:35:20,039 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,039 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content
2018-07-21T05:35:20,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is []
2018-07-21T05:35:20,039 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] starting
2018-07-21T05:35:20,040 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,040 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] Got response: 204 No Content
2018-07-21T05:35:20,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_261] response is []
2018-07-21T05:35:20,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] starting
2018-07-21T05:35:20,040 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,040 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] Got response: 204 No Content
2018-07-21T05:35:20,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_382] response is []
2018-07-21T05:35:20,040 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting
2018-07-21T05:35:20,041 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,041 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content
2018-07-21T05:35:20,041 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is []
2018-07-21T05:35:20,041 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] starting
2018-07-21T05:35:20,042 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,042 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] Got response: 204 No Content
2018-07-21T05:35:20,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_93] response is []
2018-07-21T05:35:20,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] starting
2018-07-21T05:35:20,042 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,042 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] Got response: 204 No Content
2018-07-21T05:35:20,042 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_143] response is []
2018-07-21T05:35:20,043 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] starting
2018-07-21T05:35:20,043 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,043 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] Got response: 204 No Content
2018-07-21T05:35:20,043 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_264] response is []
2018-07-21T05:35:20,043 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] starting
2018-07-21T05:35:20,044 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,044 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] Got response: 204 No Content
2018-07-21T05:35:20,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_385] response
is [] 2018-07-21T05:35:20,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:35:20,044 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,044 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:35:20,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 2018-07-21T05:35:20,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] starting 2018-07-21T05:35:20,045 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,045 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] Got response: 204 No Content 2018-07-21T05:35:20,045 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_92] response is [] 2018-07-21T05:35:20,045 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:35:20,046 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,046 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:35:20,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:35:20,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] starting 2018-07-21T05:35:20,046 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,046 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] Got response: 204 No Content 2018-07-21T05:35:20,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_263] response is [] 2018-07-21T05:35:20,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] starting 2018-07-21T05:35:20,047 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,047 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] Got response: 204 No Content 2018-07-21T05:35:20,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_384] response is [] 2018-07-21T05:35:20,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:35:20,047 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,047 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:35:20,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:35:20,047 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:35:20,048 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,048 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:35:20,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 
2018-07-21T05:35:20,048 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] starting 2018-07-21T05:35:20,048 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,048 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] Got response: 204 No Content 2018-07-21T05:35:20,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_145] response is [] 2018-07-21T05:35:20,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] starting 2018-07-21T05:35:20,049 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,049 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] Got response: 204 No Content 2018-07-21T05:35:20,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_266] response is [] 2018-07-21T05:35:20,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] starting 2018-07-21T05:35:20,049 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,049 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] Got response: 204 No Content 2018-07-21T05:35:20,049 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_387] response is [] 2018-07-21T05:35:20,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:35:20,050 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,050 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:35:20,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 2018-07-21T05:35:20,050 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] starting 2018-07-21T05:35:20,051 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,051 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] Got response: 204 No Content 2018-07-21T05:35:20,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_94] response is [] 2018-07-21T05:35:20,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] starting 2018-07-21T05:35:20,051 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,051 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] Got response: 204 No Content 2018-07-21T05:35:20,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_144] response is [] 2018-07-21T05:35:20,051 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] starting 2018-07-21T05:35:20,052 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,052 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] Got response: 204 No Content 2018-07-21T05:35:20,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_265] response 
is [] 2018-07-21T05:35:20,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] starting 2018-07-21T05:35:20,052 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,052 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] Got response: 204 No Content 2018-07-21T05:35:20,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_386] response is [] 2018-07-21T05:35:20,052 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] starting 2018-07-21T05:35:20,053 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,053 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] Got response: 204 No Content 2018-07-21T05:35:20,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_147] response is [] 2018-07-21T05:35:20,053 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] starting 2018-07-21T05:35:20,053 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,053 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] Got response: 204 No Content 2018-07-21T05:35:20,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_268] response is [] 2018-07-21T05:35:20,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] starting 2018-07-21T05:35:20,054 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,054 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] Got response: 204 No Content 2018-07-21T05:35:20,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_389] response is [] 2018-07-21T05:35:20,054 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] starting 2018-07-21T05:35:20,055 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,055 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] Got response: 204 No Content 2018-07-21T05:35:20,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_146] response is [] 2018-07-21T05:35:20,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] starting 2018-07-21T05:35:20,055 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,055 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] Got response: 204 No Content 2018-07-21T05:35:20,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_267] response is [] 2018-07-21T05:35:20,055 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] starting 2018-07-21T05:35:20,056 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,056 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] Got response: 204 No Content 2018-07-21T05:35:20,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_388] response 
is [] 2018-07-21T05:35:20,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] starting 2018-07-21T05:35:20,056 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,056 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] Got response: 204 No Content 2018-07-21T05:35:20,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_91] response is [] 2018-07-21T05:35:20,056 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] starting 2018-07-21T05:35:20,057 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,057 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] Got response: 204 No Content 2018-07-21T05:35:20,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_149] response is [] 2018-07-21T05:35:20,057 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:35:20,058 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,058 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:35:20,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 2018-07-21T05:35:20,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] starting 2018-07-21T05:35:20,058 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,058 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] Got response: 204 No Content 2018-07-21T05:35:20,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_148] response is [] 2018-07-21T05:35:20,058 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] starting 2018-07-21T05:35:20,059 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,059 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] Got response: 204 No Content 2018-07-21T05:35:20,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_269] response is [] 2018-07-21T05:35:20,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] starting 2018-07-21T05:35:20,059 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,059 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] Got response: 204 No Content 2018-07-21T05:35:20,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_70] response is [] 2018-07-21T05:35:20,059 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting 2018-07-21T05:35:20,060 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,060 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content 2018-07-21T05:35:20,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is [] 
2018-07-21T05:35:20,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting 2018-07-21T05:35:20,060 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,060 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content 2018-07-21T05:35:20,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is [] 2018-07-21T05:35:20,060 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] starting 2018-07-21T05:35:20,061 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,061 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] Got response: 204 No Content 2018-07-21T05:35:20,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_249] response is [] 2018-07-21T05:35:20,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] starting 2018-07-21T05:35:20,062 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,062 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] Got response: 204 No Content 2018-07-21T05:35:20,062 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_72] response is [] 2018-07-21T05:35:20,062 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:35:20,062 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,062 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:35:20,062 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 2018-07-21T05:35:20,062 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting 2018-07-21T05:35:20,063 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,063 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content 2018-07-21T05:35:20,063 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is [] 2018-07-21T05:35:20,063 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] starting 2018-07-21T05:35:20,063 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,063 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] Got response: 204 No Content 2018-07-21T05:35:20,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_73] response is [] 2018-07-21T05:35:20,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting 2018-07-21T05:35:20,064 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,064 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content 2018-07-21T05:35:20,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is [] 
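The exchanges above are the DruidStorageHandler polling the Druid coordinator's segment endpoint for each freshly built segment of default.druid_max_size_partition: a 204 No Content with an empty body (logged as "response is []") appears to mean the coordinator does not yet report that segment as loaded, so the handler keeps checking. A minimal sketch of one such availability check, assuming only the endpoint shape visible in the GET lines of this log (coordinator at localhost:8081, one of the segment IDs above) and a hypothetical one-second retry interval rather than Hive's actual configurable wait:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DruidSegmentLoadCheck {
  public static void main(String[] args) throws Exception {
    HttpClient client = HttpClient.newHttpClient();
    // Endpoint shape copied from the GET lines in the log above.
    String url = "http://localhost:8081/druid/coordinator/v1/datasources/"
        + "default.druid_max_size_partition/segments/"
        + "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
        + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_383";
    HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
    for (int attempt = 1; attempt <= 10; attempt++) {
      HttpResponse<String> resp =
          client.send(request, HttpResponse.BodyHandlers.ofString());
      // While the segment is unavailable the coordinator answers 204 with an
      // empty body, which Hive logs as: Checking segment [...] response is []
      // A 200 with segment metadata in the body is assumed to mean "loaded".
      if (resp.statusCode() == 200 && !resp.body().isEmpty()) {
        System.out.println("segment loaded, metadata: " + resp.body());
        return;
      }
      System.out.println("attempt " + attempt + ": not loaded yet (HTTP "
          + resp.statusCode() + ")");
      Thread.sleep(1000L); // hypothetical interval; the real handler's wait is configurable
    }
    System.out.println("segment still not reported as loaded after 10 checks");
  }
}

Note how, in the log itself, the "starting" lines come from the main query thread (ee745c13-27f8-4940-a347-c8307a2da8be main) while the matching messageReceived / Got response lines come from HttpClient-Netty-Worker threads, so consecutive checks interleave across workers even though the handler issues them sequentially.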
2018-07-21T05:35:20,064 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting 2018-07-21T05:35:20,065 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,065 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content 2018-07-21T05:35:20,065 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is [] 2018-07-21T05:35:20,065 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting 2018-07-21T05:35:20,065 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,065 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content 2018-07-21T05:35:20,065 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is [] 2018-07-21T05:35:20,065 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting 2018-07-21T05:35:20,066 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,066 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content 2018-07-21T05:35:20,066 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is [] 2018-07-21T05:35:20,066 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] starting 2018-07-21T05:35:20,066 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,066 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] Got response: 204 No Content 2018-07-21T05:35:20,067 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_370] response is [] 2018-07-21T05:35:20,067 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting 2018-07-21T05:35:20,067 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,067 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content 2018-07-21T05:35:20,067 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is [] 2018-07-21T05:35:20,067 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] starting 2018-07-21T05:35:20,068 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,068 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] Got response: 204 No Content 2018-07-21T05:35:20,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_490] response is [] 2018-07-21T05:35:20,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting 2018-07-21T05:35:20,068 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,068 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content 2018-07-21T05:35:20,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is [] 
2018-07-21T05:35:20,068 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting 2018-07-21T05:35:20,069 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,069 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content 2018-07-21T05:35:20,069 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is [] 2018-07-21T05:35:20,069 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] starting 2018-07-21T05:35:20,070 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,070 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] Got response: 204 No Content 2018-07-21T05:35:20,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_251] response is [] 2018-07-21T05:35:20,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] starting 2018-07-21T05:35:20,070 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,070 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] Got response: 204 No Content 2018-07-21T05:35:20,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_372] response is [] 2018-07-21T05:35:20,070 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting 2018-07-21T05:35:20,071 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,071 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content 2018-07-21T05:35:20,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is [] 2018-07-21T05:35:20,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] starting 2018-07-21T05:35:20,071 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,071 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] Got response: 204 No Content 2018-07-21T05:35:20,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_250] response is [] 2018-07-21T05:35:20,071 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] starting 2018-07-21T05:35:20,072 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,072 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] Got response: 204 No Content 2018-07-21T05:35:20,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_371] response is [] 2018-07-21T05:35:20,072 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting 2018-07-21T05:35:20,072 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,072 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content 2018-07-21T05:35:20,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is [] 
2018-07-21T05:35:20,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting 2018-07-21T05:35:20,073 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,073 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content 2018-07-21T05:35:20,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is [] 2018-07-21T05:35:20,073 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting 2018-07-21T05:35:20,073 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,074 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content 2018-07-21T05:35:20,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response is [] 2018-07-21T05:35:20,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] starting 2018-07-21T05:35:20,074 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,074 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] Got response: 204 No Content 2018-07-21T05:35:20,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_253] response is [] 2018-07-21T05:35:20,074 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] starting 2018-07-21T05:35:20,075 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,075 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] Got response: 204 No Content 2018-07-21T05:35:20,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_374] response is [] 2018-07-21T05:35:20,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting 2018-07-21T05:35:20,075 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,075 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content 2018-07-21T05:35:20,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is [] 2018-07-21T05:35:20,075 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting 2018-07-21T05:35:20,076 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,076 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content 2018-07-21T05:35:20,076 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is [] 2018-07-21T05:35:20,076 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting 2018-07-21T05:35:20,077 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,077 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content 2018-07-21T05:35:20,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is 
[] 2018-07-21T05:35:20,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] starting 2018-07-21T05:35:20,077 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,077 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] Got response: 204 No Content 2018-07-21T05:35:20,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_252] response is [] 2018-07-21T05:35:20,077 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] starting 2018-07-21T05:35:20,078 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,078 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] Got response: 204 No Content 2018-07-21T05:35:20,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_373] response is [] 2018-07-21T05:35:20,078 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting 2018-07-21T05:35:20,078 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,078 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content 2018-07-21T05:35:20,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is [] 2018-07-21T05:35:20,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting 2018-07-21T05:35:20,079 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,079 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content 2018-07-21T05:35:20,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is [] 2018-07-21T05:35:20,079 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] starting 2018-07-21T05:35:20,080 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,080 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] Got response: 204 No Content 2018-07-21T05:35:20,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_255] response is [] 2018-07-21T05:35:20,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] starting 2018-07-21T05:35:20,080 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,080 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] Got response: 204 No Content 2018-07-21T05:35:20,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_376] response is [] 2018-07-21T05:35:20,080 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting 2018-07-21T05:35:20,081 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,081 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content 2018-07-21T05:35:20,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is [] 
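The repeated GET/204 exchanges recorded here are the segment-readiness check that DruidStorageHandler runs after pushing segments: for each segment it polls the coordinator's /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} endpoint and treats an empty answer (logged above as "204 No Content" and "response is []") as "not loaded yet", retrying until the coordinator reports the segment's metadata. The Java sketch below illustrates only that polling pattern, not Hive's actual implementation (which uses Druid's NettyHttpClient, as the log shows); the coordinator address, data source, and segment id are illustrative values copied from the log, and the poll interval is an assumption.

// Minimal sketch (assumed, not Hive's DruidStorageHandler code) of the
// coordinator polling pattern visible in the surrounding log records.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SegmentLoadPoller {
  // Coordinator address taken from the log above.
  static final String COORDINATOR = "http://localhost:8081";

  // Returns true once the coordinator reports metadata for the segment.
  // A 204 / empty body (logged above as "response is []") means the
  // coordinator does not know the segment yet, so the caller keeps polling.
  static boolean isSegmentLoaded(HttpClient client, String dataSource,
      String segmentId) throws Exception {
    URI uri = URI.create(COORDINATOR + "/druid/coordinator/v1/datasources/"
        + dataSource + "/segments/" + segmentId);
    HttpResponse<String> resp = client.send(
        HttpRequest.newBuilder(uri).GET().build(),
        HttpResponse.BodyHandlers.ofString());
    return resp.statusCode() == 200 && !resp.body().isEmpty();
  }

  public static void main(String[] args) throws Exception {
    HttpClient client = HttpClient.newHttpClient();
    // Segment id copied from one of the log records above.
    String segment = "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
        + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_89";
    while (!isSegmentLoaded(client, "default.druid_max_size_partition", segment)) {
      Thread.sleep(1000); // back off between polls; interval is illustrative
    }
    System.out.println("Segment loaded: " + segment);
  }
}

Polling like this is necessary because Druid segment handoff is asynchronous: the coordinator only exposes a segment's metadata once it has been picked up from deep storage and assigned, so an empty response simply means "try again later" rather than an error, which is why the log shows the same check looping over every segment id.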
2018-07-21T05:35:20,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting 2018-07-21T05:35:20,081 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,081 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content 2018-07-21T05:35:20,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is [] 2018-07-21T05:35:20,081 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting 2018-07-21T05:35:20,082 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,082 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content 2018-07-21T05:35:20,082 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is [] 2018-07-21T05:35:20,082 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] starting 2018-07-21T05:35:20,082 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,082 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] Got response: 204 No Content 2018-07-21T05:35:20,083 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_254] response is [] 2018-07-21T05:35:20,083 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] starting 2018-07-21T05:35:20,083 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,083 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] Got response: 204 No Content 2018-07-21T05:35:20,083 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_375] response is [] 2018-07-21T05:35:20,083 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting 2018-07-21T05:35:20,084 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,084 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content 2018-07-21T05:35:20,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is [] 2018-07-21T05:35:20,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] starting 2018-07-21T05:35:20,084 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,084 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] Got response: 204 No Content 2018-07-21T05:35:20,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_257] response is [] 2018-07-21T05:35:20,084 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] starting 2018-07-21T05:35:20,085 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,085 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] Got response: 204 No Content 2018-07-21T05:35:20,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_378] response 
is [] 2018-07-21T05:35:20,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting 2018-07-21T05:35:20,085 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,085 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content 2018-07-21T05:35:20,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is [] 2018-07-21T05:35:20,085 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] starting 2018-07-21T05:35:20,086 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,086 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] Got response: 204 No Content 2018-07-21T05:35:20,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_256] response is [] 2018-07-21T05:35:20,086 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] starting 2018-07-21T05:35:20,087 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,087 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] Got response: 204 No Content 2018-07-21T05:35:20,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_377] response is [] 2018-07-21T05:35:20,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting 2018-07-21T05:35:20,087 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,087 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content 2018-07-21T05:35:20,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is [] 2018-07-21T05:35:20,087 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting 2018-07-21T05:35:20,088 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,088 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content 2018-07-21T05:35:20,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] response is [] 2018-07-21T05:35:20,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] starting 2018-07-21T05:35:20,088 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,088 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] Got response: 204 No Content 2018-07-21T05:35:20,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_259] response is [] 2018-07-21T05:35:20,088 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting 2018-07-21T05:35:20,089 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,089 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content 2018-07-21T05:35:20,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] 
response is [] 2018-07-21T05:35:20,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] starting 2018-07-21T05:35:20,089 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,089 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] Got response: 204 No Content 2018-07-21T05:35:20,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_258] response is [] 2018-07-21T05:35:20,089 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] starting 2018-07-21T05:35:20,090 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,090 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] Got response: 204 No Content 2018-07-21T05:35:20,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_379] response is [] 2018-07-21T05:35:20,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] starting 2018-07-21T05:35:20,090 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,090 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] Got response: 204 No Content 2018-07-21T05:35:20,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_81] response is [] 2018-07-21T05:35:20,090 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:35:20,091 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,091 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:35:20,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is [] 2018-07-21T05:35:20,091 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] starting 2018-07-21T05:35:20,092 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,092 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] Got response: 204 No Content 2018-07-21T05:35:20,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_239] response is [] 2018-07-21T05:35:20,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] starting 2018-07-21T05:35:20,092 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,092 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] Got response: 204 No Content 2018-07-21T05:35:20,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_80] response is [] 2018-07-21T05:35:20,092 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:35:20,093 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,093 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:35:20,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 
2018-07-21T05:35:20,093 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] starting 2018-07-21T05:35:20,094 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,094 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] Got response: 204 No Content 2018-07-21T05:35:20,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_238] response is [] 2018-07-21T05:35:20,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] starting 2018-07-21T05:35:20,094 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,094 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] Got response: 204 No Content 2018-07-21T05:35:20,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_359] response is [] 2018-07-21T05:35:20,094 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] starting 2018-07-21T05:35:20,095 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,095 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] Got response: 204 No Content 2018-07-21T05:35:20,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_83] response is [] 2018-07-21T05:35:20,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] starting 2018-07-21T05:35:20,095 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,095 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] Got response: 204 No Content 2018-07-21T05:35:20,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_82] response is [] 2018-07-21T05:35:20,095 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting 2018-07-21T05:35:20,096 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,096 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content 2018-07-21T05:35:20,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is [] 2018-07-21T05:35:20,096 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] starting 2018-07-21T05:35:20,096 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,096 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] Got response: 204 No Content 2018-07-21T05:35:20,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_85] response is [] 2018-07-21T05:35:20,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] starting 2018-07-21T05:35:20,097 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,097 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] Got response: 204 No Content 2018-07-21T05:35:20,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_84] response is [] 
2018-07-21T05:35:20,097 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] starting 2018-07-21T05:35:20,098 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,098 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] Got response: 204 No Content 2018-07-21T05:35:20,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_87] response is [] 2018-07-21T05:35:20,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] starting 2018-07-21T05:35:20,098 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,098 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] Got response: 204 No Content 2018-07-21T05:35:20,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_86] response is [] 2018-07-21T05:35:20,098 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] starting 2018-07-21T05:35:20,099 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,099 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] Got response: 204 No Content 2018-07-21T05:35:20,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_79] response is [] 2018-07-21T05:35:20,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] starting 2018-07-21T05:35:20,099 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,099 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] Got response: 204 No Content 2018-07-21T05:35:20,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_78] response is [] 2018-07-21T05:35:20,099 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] starting 2018-07-21T05:35:20,100 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,100 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] Got response: 204 No Content 2018-07-21T05:35:20,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_75] response is [] 2018-07-21T05:35:20,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] starting 2018-07-21T05:35:20,100 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,100 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] Got response: 204 No Content 2018-07-21T05:35:20,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_480] response is [] 2018-07-21T05:35:20,100 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] starting 2018-07-21T05:35:20,101 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,101 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] Got response: 204 No Content 2018-07-21T05:35:20,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_74] response is 
[] 2018-07-21T05:35:20,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] starting 2018-07-21T05:35:20,101 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,101 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] Got response: 204 No Content 2018-07-21T05:35:20,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_77] response is [] 2018-07-21T05:35:20,101 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] starting 2018-07-21T05:35:20,102 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,102 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] Got response: 204 No Content 2018-07-21T05:35:20,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_240] response is [] 2018-07-21T05:35:20,102 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] starting 2018-07-21T05:35:20,103 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,103 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] Got response: 204 No Content 2018-07-21T05:35:20,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_361] response is [] 2018-07-21T05:35:20,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] starting 2018-07-21T05:35:20,103 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,103 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] Got response: 204 No Content 2018-07-21T05:35:20,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_482] response is [] 2018-07-21T05:35:20,103 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] starting 2018-07-21T05:35:20,104 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,104 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] Got response: 204 No Content 2018-07-21T05:35:20,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_76] response is [] 2018-07-21T05:35:20,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] starting 2018-07-21T05:35:20,104 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,104 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] Got response: 204 No Content 2018-07-21T05:35:20,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_360] response is [] 2018-07-21T05:35:20,104 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] starting 2018-07-21T05:35:20,105 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,105 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] Got response: 204 No Content 2018-07-21T05:35:20,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_481] response is 
[] 2018-07-21T05:35:20,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] starting 2018-07-21T05:35:20,105 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,105 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] Got response: 204 No Content 2018-07-21T05:35:20,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_89] response is [] 2018-07-21T05:35:20,105 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] starting 2018-07-21T05:35:20,106 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,106 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] Got response: 204 No Content 2018-07-21T05:35:20,106 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_71] response is [] 2018-07-21T05:35:20,106 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting 2018-07-21T05:35:20,106 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,106 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content 2018-07-21T05:35:20,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is [] 2018-07-21T05:35:20,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] starting 2018-07-21T05:35:20,107 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,107 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] Got response: 204 No Content 2018-07-21T05:35:20,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_242] response is [] 2018-07-21T05:35:20,107 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] starting 2018-07-21T05:35:20,108 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,108 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] Got response: 204 No Content 2018-07-21T05:35:20,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_363] response is [] 2018-07-21T05:35:20,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] starting 2018-07-21T05:35:20,108 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,108 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] Got response: 204 No Content 2018-07-21T05:35:20,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_484] response is [] 2018-07-21T05:35:20,108 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] starting 2018-07-21T05:35:20,109 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,109 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] Got response: 204 No Content 2018-07-21T05:35:20,109 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_88] response is [] 
[... 46 further polling cycles identical to those above (GET → HTTP/1.1 204 No Content → "Checking segment [...] response is []"), logged between 2018-07-21T05:35:20,109 and 2018-07-21T05:35:20,136, covering shards _70, _120, _241, _362, _483, _73, _123, _244, _365, _486, _72, _122, _243, _364, _485, _125, _246, _367, _488, _124, _245, _366, _487, _127, _248, _369, _126, _247, _368, _489, _107, _228, _349, _106, _227, _348, _469, _109, _108 and _229 of interval 1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z and shards _92, _91, _94, _93, _96 and _95 of interval 1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z, all for datasource default.druid_max_size_partition; every request returned 204 No Content with an empty body ...]
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,135 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] Got response: 204 No Content 2018-07-21T05:35:20,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_96] response is [] 2018-07-21T05:35:20,135 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] starting 2018-07-21T05:35:20,136 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,136 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] Got response: 204 No Content 2018-07-21T05:35:20,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_95] response is [] 2018-07-21T05:35:20,136 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] starting 2018-07-21T05:35:20,136 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,136 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] Got response: 204 No Content 2018-07-21T05:35:20,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_98] response is [] 2018-07-21T05:35:20,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] starting 2018-07-21T05:35:20,137 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,137 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] Got response: 204 No Content 2018-07-21T05:35:20,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_97] response is [] 2018-07-21T05:35:20,137 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] starting 2018-07-21T05:35:20,138 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,138 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] Got response: 204 No Content 2018-07-21T05:35:20,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_90] response is [] 
2018-07-21T05:35:20,138 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] starting 2018-07-21T05:35:20,139 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,139 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] Got response: 204 No Content 2018-07-21T05:35:20,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_68] response is [] 2018-07-21T05:35:20,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] starting 2018-07-21T05:35:20,139 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,139 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] Got response: 204 No Content 2018-07-21T05:35:20,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_67] response is [] 2018-07-21T05:35:20,139 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] starting 2018-07-21T05:35:20,140 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,140 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] Got response: 204 No Content 2018-07-21T05:35:20,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_69] response is [] 2018-07-21T05:35:20,140 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] starting 2018-07-21T05:35:20,140 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,140 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] Got response: 204 No Content 2018-07-21T05:35:20,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_64] response is [] 2018-07-21T05:35:20,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] starting 2018-07-21T05:35:20,141 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,141 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] Got response: 204 No Content 2018-07-21T05:35:20,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_63] response is [] 2018-07-21T05:35:20,141 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] starting 2018-07-21T05:35:20,142 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,142 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] Got response: 204 No Content 2018-07-21T05:35:20,142 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_66] response is [] 2018-07-21T05:35:20,142 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] starting 2018-07-21T05:35:20,142 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,142 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] Got response: 204 No Content 2018-07-21T05:35:20,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_350] response is [] 
2018-07-21T05:35:20,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] starting 2018-07-21T05:35:20,143 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,143 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] Got response: 204 No Content 2018-07-21T05:35:20,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_471] response is [] 2018-07-21T05:35:20,143 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] starting 2018-07-21T05:35:20,144 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,144 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] Got response: 204 No Content 2018-07-21T05:35:20,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_65] response is [] 2018-07-21T05:35:20,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] starting 2018-07-21T05:35:20,144 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,144 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] Got response: 204 No Content 2018-07-21T05:35:20,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_470] response is [] 2018-07-21T05:35:20,144 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] starting 2018-07-21T05:35:20,145 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,145 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] Got response: 204 No Content 2018-07-21T05:35:20,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_60] response is [] 2018-07-21T05:35:20,145 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting 2018-07-21T05:35:20,146 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,146 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content 2018-07-21T05:35:20,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is [] 2018-07-21T05:35:20,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] starting 2018-07-21T05:35:20,146 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,146 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] Got response: 204 No Content 2018-07-21T05:35:20,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_231] response is [] 2018-07-21T05:35:20,146 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] starting 2018-07-21T05:35:20,147 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,147 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] Got response: 204 No Content 2018-07-21T05:35:20,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_352] 
response is [] 2018-07-21T05:35:20,147 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] starting 2018-07-21T05:35:20,147 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,147 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] Got response: 204 No Content 2018-07-21T05:35:20,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_473] response is [] 2018-07-21T05:35:20,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] starting 2018-07-21T05:35:20,148 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,148 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] Got response: 204 No Content 2018-07-21T05:35:20,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_99] response is [] 2018-07-21T05:35:20,148 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] starting 2018-07-21T05:35:20,149 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,149 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] Got response: 204 No Content 2018-07-21T05:35:20,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_230] response is [] 2018-07-21T05:35:20,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] starting 2018-07-21T05:35:20,149 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,149 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] Got response: 204 No Content 2018-07-21T05:35:20,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_351] response is [] 2018-07-21T05:35:20,149 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] starting 2018-07-21T05:35:20,150 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,150 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] Got response: 204 No Content 2018-07-21T05:35:20,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_472] response is [] 2018-07-21T05:35:20,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] starting 2018-07-21T05:35:20,150 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,150 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] Got response: 204 No Content 2018-07-21T05:35:20,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_62] response is [] 2018-07-21T05:35:20,150 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting 2018-07-21T05:35:20,151 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,151 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content 2018-07-21T05:35:20,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is [] 
2018-07-21T05:35:20,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] starting 2018-07-21T05:35:20,151 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,151 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] Got response: 204 No Content 2018-07-21T05:35:20,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_233] response is [] 2018-07-21T05:35:20,151 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] starting 2018-07-21T05:35:20,152 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,152 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] Got response: 204 No Content 2018-07-21T05:35:20,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_354] response is [] 2018-07-21T05:35:20,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] starting 2018-07-21T05:35:20,152 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,152 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] Got response: 204 No Content 2018-07-21T05:35:20,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_475] response is [] 2018-07-21T05:35:20,152 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] starting 2018-07-21T05:35:20,153 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,153 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] Got response: 204 No Content 2018-07-21T05:35:20,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_61] response is [] 2018-07-21T05:35:20,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting 2018-07-21T05:35:20,153 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,153 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content 2018-07-21T05:35:20,153 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response is [] 2018-07-21T05:35:20,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] starting 2018-07-21T05:35:20,154 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,154 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] Got response: 204 No Content 2018-07-21T05:35:20,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_232] response is [] 2018-07-21T05:35:20,154 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] starting 2018-07-21T05:35:20,155 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,155 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] Got response: 204 No Content 2018-07-21T05:35:20,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_353] response 
is []
2018-07-21T05:35:20,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] starting
2018-07-21T05:35:20,155 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,155 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] Got response: 204 No Content
2018-07-21T05:35:20,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_474] response is []
2018-07-21T05:35:20,155 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting
2018-07-21T05:35:20,156 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,156 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content
2018-07-21T05:35:20,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is []
2018-07-21T05:35:20,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] starting
2018-07-21T05:35:20,156 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,156 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] Got response: 204 No Content
2018-07-21T05:35:20,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_235] response is []
2018-07-21T05:35:20,156 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] starting
2018-07-21T05:35:20,157 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,157 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] Got response: 204 No Content
2018-07-21T05:35:20,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_356] response is []
2018-07-21T05:35:20,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] starting
2018-07-21T05:35:20,157 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,157 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] Got response: 204 No Content
2018-07-21T05:35:20,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_477] response is []
2018-07-21T05:35:20,157 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting
2018-07-21T05:35:20,158 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,158 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content
2018-07-21T05:35:20,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is []
2018-07-21T05:35:20,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] starting
2018-07-21T05:35:20,158 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,158 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] Got response: 204 No Content
2018-07-21T05:35:20,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_234] response is []
2018-07-21T05:35:20,158 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] starting
2018-07-21T05:35:20,159 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,159 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] Got response: 204 No Content
2018-07-21T05:35:20,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_355] response is []
2018-07-21T05:35:20,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] starting
2018-07-21T05:35:20,159 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,159 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] Got response: 204 No Content
2018-07-21T05:35:20,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_476] response is []
2018-07-21T05:35:20,159 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting
2018-07-21T05:35:20,160 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,160 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content
2018-07-21T05:35:20,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is []
2018-07-21T05:35:20,160 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] starting
2018-07-21T05:35:20,161 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,161 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] Got response: 204 No Content
2018-07-21T05:35:20,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_237] response is []
2018-07-21T05:35:20,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] starting
2018-07-21T05:35:20,161 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,161 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] Got response: 204 No Content
2018-07-21T05:35:20,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_358] response is []
2018-07-21T05:35:20,161 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] starting
2018-07-21T05:35:20,162 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,162 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] Got response: 204 No Content
2018-07-21T05:35:20,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_479] response is []
2018-07-21T05:35:20,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting
2018-07-21T05:35:20,162 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,162 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content
2018-07-21T05:35:20,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is []
2018-07-21T05:35:20,162 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] starting
2018-07-21T05:35:20,163 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,163 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] Got response: 204 No Content
2018-07-21T05:35:20,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_236] response is []
2018-07-21T05:35:20,163 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] starting
2018-07-21T05:35:20,164 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,164 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] Got response: 204 No Content
2018-07-21T05:35:20,164 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_357] response is []
2018-07-21T05:35:20,164 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] starting
2018-07-21T05:35:20,164 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,164 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] Got response: 204 No Content
2018-07-21T05:35:20,164 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_478] response is []
2018-07-21T05:35:20,164 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] starting
2018-07-21T05:35:20,165 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,165 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] Got response: 204 No Content
2018-07-21T05:35:20,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_21] response is []
2018-07-21T05:35:20,165 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] starting
2018-07-21T05:35:20,165 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,165 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] Got response: 204 No Content
2018-07-21T05:35:20,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_20] response is []
2018-07-21T05:35:20,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] starting
2018-07-21T05:35:20,166 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,166 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] Got response: 204 No Content
2018-07-21T05:35:20,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_181] response is []
2018-07-21T05:35:20,166 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] starting
2018-07-21T05:35:20,166 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,166 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] Got response: 204 No Content
2018-07-21T05:35:20,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_180] response is []
2018-07-21T05:35:20,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] starting
2018-07-21T05:35:20,167 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,167 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] Got response: 204 No Content
2018-07-21T05:35:20,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_183] response is []
2018-07-21T05:35:20,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] starting
2018-07-21T05:35:20,168 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,168 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] Got response: 204 No Content
2018-07-21T05:35:20,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_182] response is []
2018-07-21T05:35:20,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] starting
2018-07-21T05:35:20,168 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,168 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] Got response: 204 No Content
2018-07-21T05:35:20,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_185] response is []
2018-07-21T05:35:20,168 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] starting
2018-07-21T05:35:20,169 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,169 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] Got response: 204 No Content
2018-07-21T05:35:20,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_184] response is []
2018-07-21T05:35:20,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] starting
2018-07-21T05:35:20,169 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,169 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] Got response: 204 No Content
2018-07-21T05:35:20,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_23] response is []
2018-07-21T05:35:20,169 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] starting
2018-07-21T05:35:20,170 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,170 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] Got response: 204 No Content
2018-07-21T05:35:20,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_187] response is []
2018-07-21T05:35:20,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] starting
2018-07-21T05:35:20,170 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,170 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] Got response: 204 No Content
2018-07-21T05:35:20,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_22] response is []
2018-07-21T05:35:20,170 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] starting
2018-07-21T05:35:20,171 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,171 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] Got response: 204 No Content
2018-07-21T05:35:20,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_186] response is []
2018-07-21T05:35:20,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] starting
2018-07-21T05:35:20,171 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,171 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] Got response: 204 No Content
2018-07-21T05:35:20,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_25] response is []
2018-07-21T05:35:20,171 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] starting
2018-07-21T05:35:20,172 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,172 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] Got response: 204 No Content
2018-07-21T05:35:20,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_189] response is []
2018-07-21T05:35:20,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] starting
2018-07-21T05:35:20,172 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,172 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] Got response: 204 No Content
2018-07-21T05:35:20,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_24] response is []
2018-07-21T05:35:20,172 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] starting
2018-07-21T05:35:20,173 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,173 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] Got response: 204 No Content
2018-07-21T05:35:20,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_188] response is []
2018-07-21T05:35:20,173 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] starting
2018-07-21T05:35:20,174 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,174 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] Got response: 204 No Content
2018-07-21T05:35:20,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_27] response is []
2018-07-21T05:35:20,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] starting
2018-07-21T05:35:20,174 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,174 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] Got response: 204 No Content
2018-07-21T05:35:20,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_26] response is []
2018-07-21T05:35:20,174 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] starting
2018-07-21T05:35:20,175 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,175 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] Got response: 204 No Content
2018-07-21T05:35:20,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_29] response is []
2018-07-21T05:35:20,175 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] starting
2018-07-21T05:35:20,175 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,175 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] Got response: 204 No Content
2018-07-21T05:35:20,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_28] response is []
2018-07-21T05:35:20,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] starting
2018-07-21T05:35:20,176 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,176 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] Got response: 204 No Content
2018-07-21T05:35:20,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_30] response is []
2018-07-21T05:35:20,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] starting
2018-07-21T05:35:20,177 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,177 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] Got response: 204 No Content
2018-07-21T05:35:20,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_32] response is []
2018-07-21T05:35:20,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] starting
2018-07-21T05:35:20,177 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,177 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] Got response: 204 No Content
2018-07-21T05:35:20,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_31] response is []
2018-07-21T05:35:20,177 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] starting
2018-07-21T05:35:20,178 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,178 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] Got response: 204 No Content
2018-07-21T05:35:20,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_170] response is []
2018-07-21T05:35:20,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] starting
2018-07-21T05:35:20,178 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,178 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] Got response: 204 No Content
2018-07-21T05:35:20,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_291] response is []
2018-07-21T05:35:20,178 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] starting
2018-07-21T05:35:20,179 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,179 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] Got response: 204 No Content
2018-07-21T05:35:20,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_290] response is []
2018-07-21T05:35:20,179 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] starting
2018-07-21T05:35:20,180 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,180 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] Got response: 204 No Content
2018-07-21T05:35:20,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_172] response is []
2018-07-21T05:35:20,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] starting
2018-07-21T05:35:20,180 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,180 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] Got response: 204 No Content
2018-07-21T05:35:20,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_293] response is []
2018-07-21T05:35:20,180 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] starting
2018-07-21T05:35:20,181 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,181 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] Got response: 204 No Content
2018-07-21T05:35:20,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_171] response is []
2018-07-21T05:35:20,181 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] starting
2018-07-21T05:35:20,182 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,182 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] Got response: 204 No Content
2018-07-21T05:35:20,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_292] response is []
2018-07-21T05:35:20,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] starting
2018-07-21T05:35:20,182 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,182 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] Got response: 204 No Content
2018-07-21T05:35:20,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_174] response is []
2018-07-21T05:35:20,182 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] starting
2018-07-21T05:35:20,183 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,183 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295] Got response: 204 No Content
2018-07-21T05:35:20,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_295]
response is [] 2018-07-21T05:35:20,183 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] starting 2018-07-21T05:35:20,184 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,184 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] Got response: 204 No Content 2018-07-21T05:35:20,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_173] response is [] 2018-07-21T05:35:20,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] starting 2018-07-21T05:35:20,184 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,184 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] Got response: 204 No Content 2018-07-21T05:35:20,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_294] response is [] 2018-07-21T05:35:20,184 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] starting 2018-07-21T05:35:20,185 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,185 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] Got response: 204 No Content 2018-07-21T05:35:20,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_34] response is [] 2018-07-21T05:35:20,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting 2018-07-21T05:35:20,185 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,185 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content 2018-07-21T05:35:20,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is [] 2018-07-21T05:35:20,185 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] starting 2018-07-21T05:35:20,186 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,186 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] Got response: 204 No Content 2018-07-21T05:35:20,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_176] response is [] 2018-07-21T05:35:20,186 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] starting 2018-07-21T05:35:20,186 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,186 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] Got response: 204 No Content 2018-07-21T05:35:20,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_297] response is [] 2018-07-21T05:35:20,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] starting 2018-07-21T05:35:20,187 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,187 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] Got response: 204 No Content 2018-07-21T05:35:20,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_33] response is [] 
2018-07-21T05:35:20,187 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting 2018-07-21T05:35:20,187 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,187 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content 2018-07-21T05:35:20,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is [] 2018-07-21T05:35:20,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] starting 2018-07-21T05:35:20,188 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,188 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] Got response: 204 No Content 2018-07-21T05:35:20,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_175] response is [] 2018-07-21T05:35:20,188 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] starting 2018-07-21T05:35:20,189 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,189 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] Got response: 204 No Content 2018-07-21T05:35:20,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_296] response is [] 2018-07-21T05:35:20,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] starting 2018-07-21T05:35:20,189 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,189 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] Got response: 204 No Content 2018-07-21T05:35:20,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_36] response is [] 2018-07-21T05:35:20,189 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting 2018-07-21T05:35:20,190 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,190 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content 2018-07-21T05:35:20,190 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is [] 2018-07-21T05:35:20,190 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] starting 2018-07-21T05:35:20,191 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,191 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] Got response: 204 No Content 2018-07-21T05:35:20,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_178] response is [] 2018-07-21T05:35:20,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] starting 2018-07-21T05:35:20,191 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,191 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] Got response: 204 No Content 2018-07-21T05:35:20,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_299] response is [] 
2018-07-21T05:35:20,191 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] starting 2018-07-21T05:35:20,192 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,192 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] Got response: 204 No Content 2018-07-21T05:35:20,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_35] response is [] 2018-07-21T05:35:20,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting 2018-07-21T05:35:20,192 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,192 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content 2018-07-21T05:35:20,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is [] 2018-07-21T05:35:20,192 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] starting 2018-07-21T05:35:20,193 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,193 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] Got response: 204 No Content 2018-07-21T05:35:20,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_177] response is [] 2018-07-21T05:35:20,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] starting 2018-07-21T05:35:20,193 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,193 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] Got response: 204 No Content 2018-07-21T05:35:20,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_298] response is [] 2018-07-21T05:35:20,193 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] starting 2018-07-21T05:35:20,194 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,194 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] Got response: 204 No Content 2018-07-21T05:35:20,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_38] response is [] 2018-07-21T05:35:20,194 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] starting 2018-07-21T05:35:20,194 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,194 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] Got response: 204 No Content 2018-07-21T05:35:20,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_37] response is [] 2018-07-21T05:35:20,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] starting 2018-07-21T05:35:20,195 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,195 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] Got response: 204 No Content 2018-07-21T05:35:20,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_179] response is 
[] 2018-07-21T05:35:20,195 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting 2018-07-21T05:35:20,196 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,196 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content 2018-07-21T05:35:20,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is [] 2018-07-21T05:35:20,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] starting 2018-07-21T05:35:20,196 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,196 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] Got response: 204 No Content 2018-07-21T05:35:20,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_39] response is [] 2018-07-21T05:35:20,196 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting 2018-07-21T05:35:20,196 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,196 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content 2018-07-21T05:35:20,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is [] 2018-07-21T05:35:20,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:35:20,197 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,197 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:35:20,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 2018-07-21T05:35:20,197 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting 2018-07-21T05:35:20,198 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,198 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content 2018-07-21T05:35:20,198 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is [] 2018-07-21T05:35:20,198 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting 2018-07-21T05:35:20,198 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,198 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content 2018-07-21T05:35:20,198 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is [] 2018-07-21T05:35:20,198 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:35:20,199 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,199 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:35:20,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 
2018-07-21T05:35:20,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] starting 2018-07-21T05:35:20,199 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,199 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] Got response: 204 No Content 2018-07-21T05:35:20,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_280] response is [] 2018-07-21T05:35:20,199 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] starting 2018-07-21T05:35:20,200 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,200 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] Got response: 204 No Content 2018-07-21T05:35:20,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_161] response is [] 2018-07-21T05:35:20,200 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] starting 2018-07-21T05:35:20,200 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,200 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] Got response: 204 No Content 2018-07-21T05:35:20,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_282] response is [] 2018-07-21T05:35:20,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] starting 2018-07-21T05:35:20,201 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,201 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] Got response: 204 No Content 2018-07-21T05:35:20,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_160] response is [] 2018-07-21T05:35:20,201 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] starting 2018-07-21T05:35:20,202 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,202 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] Got response: 204 No Content 2018-07-21T05:35:20,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_281] response is [] 2018-07-21T05:35:20,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] starting 2018-07-21T05:35:20,202 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,202 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] Got response: 204 No Content 2018-07-21T05:35:20,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_163] response is [] 2018-07-21T05:35:20,202 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] starting 2018-07-21T05:35:20,202 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,203 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] Got response: 204 No Content 2018-07-21T05:35:20,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_284] response 
is [] 2018-07-21T05:35:20,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] starting 2018-07-21T05:35:20,203 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,203 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] Got response: 204 No Content 2018-07-21T05:35:20,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_162] response is [] 2018-07-21T05:35:20,203 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] starting 2018-07-21T05:35:20,204 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,204 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] Got response: 204 No Content 2018-07-21T05:35:20,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_283] response is [] 2018-07-21T05:35:20,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting 2018-07-21T05:35:20,204 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,204 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content 2018-07-21T05:35:20,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is [] 2018-07-21T05:35:20,204 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] starting 2018-07-21T05:35:20,205 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,205 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] Got response: 204 No Content 2018-07-21T05:35:20,205 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_165] response is [] 2018-07-21T05:35:20,205 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] starting 2018-07-21T05:35:20,206 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,206 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] Got response: 204 No Content 2018-07-21T05:35:20,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_286] response is [] 2018-07-21T05:35:20,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting 2018-07-21T05:35:20,206 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,206 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content 2018-07-21T05:35:20,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is [] 2018-07-21T05:35:20,206 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] starting 2018-07-21T05:35:20,207 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,207 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] Got response: 204 No Content 2018-07-21T05:35:20,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_164] response is 
[] 2018-07-21T05:35:20,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] starting 2018-07-21T05:35:20,207 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,207 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] Got response: 204 No Content 2018-07-21T05:35:20,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_285] response is [] 2018-07-21T05:35:20,207 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:35:20,208 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,208 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:35:20,208 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 2018-07-21T05:35:20,208 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] starting 2018-07-21T05:35:20,208 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,209 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] Got response: 204 No Content 2018-07-21T05:35:20,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_167] response is [] 2018-07-21T05:35:20,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] starting 2018-07-21T05:35:20,209 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,209 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] Got response: 204 No Content 2018-07-21T05:35:20,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_288] response is [] 2018-07-21T05:35:20,209 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:35:20,210 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,210 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:35:20,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:35:20,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] starting 2018-07-21T05:35:20,210 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,210 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] Got response: 204 No Content 2018-07-21T05:35:20,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_166] response is [] 2018-07-21T05:35:20,210 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] starting 2018-07-21T05:35:20,211 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,211 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] Got response: 204 No Content 2018-07-21T05:35:20,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_287] response is 
[] 2018-07-21T05:35:20,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:35:20,211 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,211 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:35:20,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:35:20,211 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] starting 2018-07-21T05:35:20,212 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,212 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] Got response: 204 No Content 2018-07-21T05:35:20,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_169] response is [] 2018-07-21T05:35:20,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:35:20,212 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,212 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:35:20,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 2018-07-21T05:35:20,212 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] starting 2018-07-21T05:35:20,213 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,213 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] Got response: 204 No Content 2018-07-21T05:35:20,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_168] response is [] 2018-07-21T05:35:20,213 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] starting 2018-07-21T05:35:20,214 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,214 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] Got response: 204 No Content 2018-07-21T05:35:20,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_289] response is [] 2018-07-21T05:35:20,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:35:20,214 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,214 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:35:20,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 2018-07-21T05:35:20,214 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:35:20,215 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,215 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:35:20,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 
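The run of paired GET / "204 No Content" entries above is Hive's DruidStorageHandler verifying that freshly published segments have been picked up by the Druid coordinator: it issues one GET per segment against /druid/coordinator/v1/datasources/<datasource>/segments/<segmentId>, and an empty 204 reply ("response is []") means the coordinator does not know that segment yet, so the check is repeated. The following is a minimal sketch of that polling loop; the class name, method names, and retry parameters are assumptions made for the example rather than Hive's actual implementation, and only the endpoint shape and the 204-means-not-yet-loaded convention are taken from the log.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Illustrative poller for the pattern visible in the log: for each freshly
 * published Druid segment, ask the coordinator whether it knows the segment
 * yet. The coordinator answers 204 No Content (empty body) until the segment
 * metadata has been loaded, so the caller retries until every URL returns 200.
 * All names here are hypothetical, not Hive's.
 */
public class SegmentLoadPoller {

    // segmentUrl is assumed to look like:
    // http://localhost:8081/druid/coordinator/v1/datasources/<ds>/segments/<segmentId>
    private static boolean isSegmentKnown(String segmentUrl) throws IOException {
        HttpURLConnection conn = (HttpURLConnection) new URL(segmentUrl).openConnection();
        conn.setRequestMethod("GET");
        try {
            // 200 with a body: the coordinator has the segment.
            // 204 with an empty body: not picked up yet, keep polling.
            return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
        } finally {
            conn.disconnect();
        }
    }

    /** Poll until all segments are known or maxRetries passes are exhausted. */
    public static Set<String> waitForSegments(List<String> segmentUrls,
                                              long passiveWaitMs,
                                              int maxRetries) throws Exception {
        Set<String> pending = new HashSet<>(segmentUrls);
        for (int attempt = 0; attempt < maxRetries && !pending.isEmpty(); attempt++) {
            // Drop every segment the coordinator now reports as known.
            pending.removeIf(url -> {
                try {
                    return isSegmentKnown(url);
                } catch (IOException e) {
                    return false; // treat I/O trouble as "not loaded yet"
                }
            });
            if (!pending.isEmpty()) {
                Thread.sleep(passiveWaitMs); // back off between full passes
            }
        }
        return pending; // anything left never showed up in time
    }
}

After a pass that leaves segments pending, the poller sleeps before retrying; Hive's handler similarly waits a configured interval between coordinator checks (exact configuration names vary by Hive version), which is why the log shows the segment URLs being requested in rapid bursts rather than continuously.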
2018-07-21T05:35:20,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:35:20,215 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,215 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:35:20,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:35:20,215 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:35:20,216 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,216 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:35:20,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 2018-07-21T05:35:20,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:35:20,216 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,216 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:35:20,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:35:20,216 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] starting 2018-07-21T05:35:20,217 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,217 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] Got response: 204 No Content 2018-07-21T05:35:20,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_390] response is [] 2018-07-21T05:35:20,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] starting 2018-07-21T05:35:20,217 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,217 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] Got response: 204 No Content 2018-07-21T05:35:20,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_150] response is [] 2018-07-21T05:35:20,217 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] starting 2018-07-21T05:35:20,218 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,218 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] Got response: 204 No Content 2018-07-21T05:35:20,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_271] response is [] 2018-07-21T05:35:20,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] starting 2018-07-21T05:35:20,218 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,218 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] Got response: 204 No Content 2018-07-21T05:35:20,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_392] response 
is [] 2018-07-21T05:35:20,218 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] starting 2018-07-21T05:35:20,219 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,219 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] Got response: 204 No Content 2018-07-21T05:35:20,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_270] response is [] 2018-07-21T05:35:20,219 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] starting 2018-07-21T05:35:20,219 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,220 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] Got response: 204 No Content 2018-07-21T05:35:20,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_391] response is [] 2018-07-21T05:35:20,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] starting 2018-07-21T05:35:20,220 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,220 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] Got response: 204 No Content 2018-07-21T05:35:20,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_152] response is [] 2018-07-21T05:35:20,220 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] starting 2018-07-21T05:35:20,221 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,221 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] Got response: 204 No Content 2018-07-21T05:35:20,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_273] response is [] 2018-07-21T05:35:20,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] starting 2018-07-21T05:35:20,221 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,221 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] Got response: 204 No Content 2018-07-21T05:35:20,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_394] response is [] 2018-07-21T05:35:20,221 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] starting 2018-07-21T05:35:20,222 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,222 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] Got response: 204 No Content 2018-07-21T05:35:20,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_151] response is [] 2018-07-21T05:35:20,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] starting 2018-07-21T05:35:20,222 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,222 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] Got response: 204 No Content 2018-07-21T05:35:20,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_272] response 
is [] 2018-07-21T05:35:20,222 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] starting 2018-07-21T05:35:20,223 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,223 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] Got response: 204 No Content 2018-07-21T05:35:20,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_393] response is [] 2018-07-21T05:35:20,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting 2018-07-21T05:35:20,223 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,223 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content 2018-07-21T05:35:20,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is [] 2018-07-21T05:35:20,223 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] starting 2018-07-21T05:35:20,224 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,224 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] Got response: 204 No Content 2018-07-21T05:35:20,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_154] response is [] 2018-07-21T05:35:20,224 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] starting 2018-07-21T05:35:20,224 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,224 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] Got response: 204 No Content 2018-07-21T05:35:20,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_275] response is [] 2018-07-21T05:35:20,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] starting 2018-07-21T05:35:20,225 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,225 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] Got response: 204 No Content 2018-07-21T05:35:20,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_396] response is [] 2018-07-21T05:35:20,225 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting 2018-07-21T05:35:20,226 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,226 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content 2018-07-21T05:35:20,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is [] 2018-07-21T05:35:20,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] starting 2018-07-21T05:35:20,226 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,226 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] Got response: 204 No Content 2018-07-21T05:35:20,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_153] response 
is [] 2018-07-21T05:35:20,226 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] starting 2018-07-21T05:35:20,227 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,227 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] Got response: 204 No Content 2018-07-21T05:35:20,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_274] response is [] 2018-07-21T05:35:20,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] starting 2018-07-21T05:35:20,227 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,227 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] Got response: 204 No Content 2018-07-21T05:35:20,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_395] response is [] 2018-07-21T05:35:20,227 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:35:20,228 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,228 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:35:20,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:35:20,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] starting 2018-07-21T05:35:20,228 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,228 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] Got response: 204 No Content 2018-07-21T05:35:20,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_156] response is [] 2018-07-21T05:35:20,228 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] starting 2018-07-21T05:35:20,229 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,229 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] Got response: 204 No Content 2018-07-21T05:35:20,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_277] response is [] 2018-07-21T05:35:20,229 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] starting 2018-07-21T05:35:20,230 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,230 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] Got response: 204 No Content 2018-07-21T05:35:20,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_398] response is [] 2018-07-21T05:35:20,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:35:20,230 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,230 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:35:20,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 
2018-07-21T05:35:20,230 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] starting 2018-07-21T05:35:20,231 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,231 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] Got response: 204 No Content 2018-07-21T05:35:20,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_155] response is [] 2018-07-21T05:35:20,231 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] starting 2018-07-21T05:35:20,231 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,231 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] Got response: 204 No Content 2018-07-21T05:35:20,232 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_276] response is [] 2018-07-21T05:35:20,232 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] starting 2018-07-21T05:35:20,232 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,232 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] Got response: 204 No Content 2018-07-21T05:35:20,232 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_397] response is [] 2018-07-21T05:35:20,232 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] starting 2018-07-21T05:35:20,233 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,233 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] Got response: 204 No Content 2018-07-21T05:35:20,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_158] response is [] 2018-07-21T05:35:20,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] starting 2018-07-21T05:35:20,233 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,233 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] Got response: 204 No Content 2018-07-21T05:35:20,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_279] response is [] 2018-07-21T05:35:20,233 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:35:20,234 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,234 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:35:20,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 2018-07-21T05:35:20,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] starting 2018-07-21T05:35:20,234 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,234 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] Got response: 204 No Content 2018-07-21T05:35:20,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_157] response is [] 
2018-07-21T05:35:20,234 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] starting 2018-07-21T05:35:20,235 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,235 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] Got response: 204 No Content 2018-07-21T05:35:20,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_278] response is [] 2018-07-21T05:35:20,235 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] starting 2018-07-21T05:35:20,235 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,235 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] Got response: 204 No Content 2018-07-21T05:35:20,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_399] response is [] 2018-07-21T05:35:20,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] starting 2018-07-21T05:35:20,236 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,236 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] Got response: 204 No Content 2018-07-21T05:35:20,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_159] response is [] 2018-07-21T05:35:20,236 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] starting 2018-07-21T05:35:20,237 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,237 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] Got response: 204 No Content 2018-07-21T05:35:20,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_415] response is [] 2018-07-21T05:35:20,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] starting 2018-07-21T05:35:20,237 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,237 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] Got response: 204 No Content 2018-07-21T05:35:20,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_414] response is [] 2018-07-21T05:35:20,237 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] starting 2018-07-21T05:35:20,238 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,238 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] Got response: 204 No Content 2018-07-21T05:35:20,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_417] response is [] 2018-07-21T05:35:20,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] starting 2018-07-21T05:35:20,238 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,238 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] Got response: 204 No Content 2018-07-21T05:35:20,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_416] 
response is [] 2018-07-21T05:35:20,238 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] starting 2018-07-21T05:35:20,239 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,239 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] Got response: 204 No Content 2018-07-21T05:35:20,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_419] response is [] 2018-07-21T05:35:20,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] starting 2018-07-21T05:35:20,239 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,239 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] Got response: 204 No Content 2018-07-21T05:35:20,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_418] response is [] 2018-07-21T05:35:20,239 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] starting 2018-07-21T05:35:20,240 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,240 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] Got response: 204 No Content 2018-07-21T05:35:20,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_420] response is [] 2018-07-21T05:35:20,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] starting 2018-07-21T05:35:20,240 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,240 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] Got response: 204 No Content 2018-07-21T05:35:20,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_301] response is [] 2018-07-21T05:35:20,240 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] starting 2018-07-21T05:35:20,241 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,241 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] Got response: 204 No Content 2018-07-21T05:35:20,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_422] response is [] 2018-07-21T05:35:20,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] starting 2018-07-21T05:35:20,241 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,241 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] Got response: 204 No Content 2018-07-21T05:35:20,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00] response is [] 2018-07-21T05:35:20,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] starting 2018-07-21T05:35:20,242 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,242 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] Got response: 204 No Content 2018-07-21T05:35:20,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_300] response is [] 
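
Every record in this stretch follows the same four-step pattern: for each segment the test produced, DruidStorageHandler issues a GET against the coordinator's segment endpoint (/druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId}, as the URLs above show), the coordinator answers 204 No Content, and the handler logs "response is []", meaning the coordinator does not yet report that segment as loaded, so polling continues. The sketch below reproduces just that probe in isolation. It is a minimal illustration, not Hive's actual implementation: the class name SegmentLoadProbe, the 5-second timeouts, the 1-second poll interval, and the assumption that a 200 response marks the segment as loaded are guesses based only on the behaviour visible in these log lines.

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    /**
     * Minimal sketch of the segment-availability probe seen in the log above.
     * Assumption (not confirmed by the log): a 204/empty response from the
     * coordinator means the segment is not yet loaded, while a 200 with a
     * JSON body means the coordinator knows about it.
     */
    public class SegmentLoadProbe {

        // Coordinator address taken from the log records above.
        private static final String COORDINATOR = "http://localhost:8081";

        /** Returns true once the coordinator reports the segment. */
        static boolean isSegmentLoaded(String dataSource, String segmentId)
                throws IOException {
            URL url = new URL(COORDINATOR + "/druid/coordinator/v1/datasources/"
                    + dataSource + "/segments/" + segmentId);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            conn.setConnectTimeout(5_000); // illustrative timeout choices
            conn.setReadTimeout(5_000);
            try {
                // 204 No Content (as in every record above) => the coordinator
                // has no metadata for this segment yet, i.e. "response is []".
                return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
            } finally {
                conn.disconnect();
            }
        }

        public static void main(String[] args) throws Exception {
            // Segment id format matches the log:
            // <dataSource>_<intervalStart>_<intervalEnd>_<version>_<partitionNum>
            String dataSource = "default.druid_max_size_partition";
            String segmentId = dataSource
                    + "_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z"
                    + "_2018-07-21T05:31:59.547-07:00_393";
            while (!isSegmentLoaded(dataSource, segmentId)) {
                Thread.sleep(1_000); // poll once per second until loaded
            }
            System.out.println("segment loaded: " + segmentId);
        }
    }

Run against a local coordinator, this loops exactly like the handler does here: one GET per segment id, repeated until the endpoint stops returning an empty response, which is why the log shows the same URL family probed hundreds of times within a few milliseconds of each other.
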
2018-07-21T05:35:20,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] starting 2018-07-21T05:35:20,242 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,242 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] Got response: 204 No Content 2018-07-21T05:35:20,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_421] response is [] 2018-07-21T05:35:20,242 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] starting 2018-07-21T05:35:20,243 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,243 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] Got response: 204 No Content 2018-07-21T05:35:20,243 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_303] response is [] 2018-07-21T05:35:20,243 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] starting 2018-07-21T05:35:20,244 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,244 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] Got response: 204 No Content 2018-07-21T05:35:20,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_424] response is [] 2018-07-21T05:35:20,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] starting 2018-07-21T05:35:20,244 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,244 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] Got response: 204 No Content 2018-07-21T05:35:20,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_302] response is [] 2018-07-21T05:35:20,244 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] starting 2018-07-21T05:35:20,245 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,245 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] Got response: 204 No Content 2018-07-21T05:35:20,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_423] response is [] 2018-07-21T05:35:20,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] starting 2018-07-21T05:35:20,245 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,245 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] Got response: 204 No Content 2018-07-21T05:35:20,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404] response is [] 2018-07-21T05:35:20,245 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] starting 2018-07-21T05:35:20,246 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,246 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] Got response: 204 No Content 2018-07-21T05:35:20,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_403] response 
is [] 2018-07-21T05:35:20,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] starting 2018-07-21T05:35:20,246 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,246 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] Got response: 204 No Content 2018-07-21T05:35:20,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_406] response is [] 2018-07-21T05:35:20,246 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] starting 2018-07-21T05:35:20,247 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,247 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] Got response: 204 No Content 2018-07-21T05:35:20,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_405] response is [] 2018-07-21T05:35:20,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] starting 2018-07-21T05:35:20,247 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,247 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] Got response: 204 No Content 2018-07-21T05:35:20,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_408] response is [] 2018-07-21T05:35:20,247 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] starting 2018-07-21T05:35:20,248 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,248 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] Got response: 204 No Content 2018-07-21T05:35:20,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_407] response is [] 2018-07-21T05:35:20,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] starting 2018-07-21T05:35:20,248 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,248 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] Got response: 204 No Content 2018-07-21T05:35:20,248 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_409] response is [] 2018-07-21T05:35:20,249 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] starting 2018-07-21T05:35:20,249 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,249 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] Got response: 204 No Content 2018-07-21T05:35:20,249 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_411] response is [] 2018-07-21T05:35:20,249 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] starting 2018-07-21T05:35:20,250 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,250 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] Got response: 204 No Content 2018-07-21T05:35:20,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_410] 
response is [] 2018-07-21T05:35:20,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] starting 2018-07-21T05:35:20,250 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,250 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] Got response: 204 No Content 2018-07-21T05:35:20,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_413] response is [] 2018-07-21T05:35:20,250 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] starting 2018-07-21T05:35:20,251 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,251 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] Got response: 204 No Content 2018-07-21T05:35:20,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_412] response is [] 2018-07-21T05:35:20,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting 2018-07-21T05:35:20,251 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,251 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content 2018-07-21T05:35:20,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is [] 2018-07-21T05:35:20,251 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting 2018-07-21T05:35:20,252 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,252 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content 2018-07-21T05:35:20,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is [] 2018-07-21T05:35:20,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting 2018-07-21T05:35:20,252 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,252 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content 2018-07-21T05:35:20,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is [] 2018-07-21T05:35:20,252 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] starting 2018-07-21T05:35:20,253 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,253 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] Got response: 204 No Content 2018-07-21T05:35:20,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_400] response is [] 2018-07-21T05:35:20,253 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] starting 2018-07-21T05:35:20,253 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,253 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] Got response: 204 No Content 2018-07-21T05:35:20,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_402] response is [] 
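The run of DEBUG records above shows what DruidStorageHandler is doing while it waits for the freshly published datasource: for every segment of default.druid_max_size_partition it issues a GET against the coordinator's /druid/coordinator/v1/datasources/{dataSource}/segments/{segmentId} endpoint, the coordinator answers 204 No Content with an empty body, and the handler logs "response is []", i.e. the coordinator does not yet report that segment as available. The following is a minimal, self-contained sketch of that per-segment check, assuming only the endpoint shape and host visible in the URLs above; the class and method names are hypothetical illustrations, not Hive's actual implementation.

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical sketch of the check seen in the log: GET the coordinator's
// segment endpoint and treat 204 / an empty body as "segment not loaded yet".
public class SegmentLoadCheck {

    // Endpoint path matches the URLs in the records above.
    static boolean isSegmentLoaded(String coordinator, String dataSource, String segmentId)
            throws IOException {
        URL url = new URL(String.format(
                "http://%s/druid/coordinator/v1/datasources/%s/segments/%s",
                coordinator, dataSource, segmentId));
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try {
            int code = conn.getResponseCode();
            if (code != 200) {
                // The log shows "HTTP/1.1 204 No Content" while segments are pending.
                return false;
            }
            try (InputStream in = conn.getInputStream()) {
                // A non-empty JSON body means the coordinator knows the segment;
                // an empty body corresponds to the logged "response is []".
                return in.readAllBytes().length > 0;
            }
        } finally {
            conn.disconnect();
        }
    }

    public static void main(String[] args) throws Exception {
        // Values taken verbatim from the records above.
        boolean loaded = isSegmentLoaded("localhost:8081",
                "default.druid_max_size_partition",
                "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_"
                        + "1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_404");
        System.out.println("segment loaded: " + loaded);
    }
}

The repeated records that continue below suggest the handler simply iterates this check over every segment of the datasource and retries until the coordinator reports them, which is why the same GET/204/"response is []" triple recurs with only the segment shard number changing.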
2018-07-21T05:35:20,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] starting 2018-07-21T05:35:20,254 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,254 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] Got response: 204 No Content 2018-07-21T05:35:20,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_401] response is [] 2018-07-21T05:35:20,254 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] starting 2018-07-21T05:35:20,255 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,255 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] Got response: 204 No Content 2018-07-21T05:35:20,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_141] response is [] 2018-07-21T05:35:20,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] starting 2018-07-21T05:35:20,255 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,255 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] Got response: 204 No Content 2018-07-21T05:35:20,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_142] response is [] 2018-07-21T05:35:20,255 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] starting 2018-07-21T05:35:20,256 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,256 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] Got response: 204 No Content 2018-07-21T05:35:20,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_140] response is [] 2018-07-21T05:35:20,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:35:20,256 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,256 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:35:20,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:35:20,256 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting 2018-07-21T05:35:20,257 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,257 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content 2018-07-21T05:35:20,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is [] 2018-07-21T05:35:20,257 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting 2018-07-21T05:35:20,258 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,258 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content 2018-07-21T05:35:20,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is [] 
2018-07-21T05:35:20,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:35:20,258 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,258 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:35:20,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 2018-07-21T05:35:20,258 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting 2018-07-21T05:35:20,259 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,259 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:35:20,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 2018-07-21T05:35:20,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting 2018-07-21T05:35:20,259 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,259 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content 2018-07-21T05:35:20,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is [] 2018-07-21T05:35:20,259 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting 2018-07-21T05:35:20,260 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,260 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content 2018-07-21T05:35:20,260 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is [] 2018-07-21T05:35:20,260 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting 2018-07-21T05:35:20,260 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,260 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content 2018-07-21T05:35:20,260 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is [] 2018-07-21T05:35:20,260 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting 2018-07-21T05:35:20,261 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,261 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content 2018-07-21T05:35:20,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is [] 2018-07-21T05:35:20,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting 2018-07-21T05:35:20,261 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,261 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content 2018-07-21T05:35:20,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is [] 
2018-07-21T05:35:20,261 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] starting 2018-07-21T05:35:20,262 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,262 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] Got response: 204 No Content 2018-07-21T05:35:20,262 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_130] response is [] 2018-07-21T05:35:20,262 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] starting 2018-07-21T05:35:20,262 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,262 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] Got response: 204 No Content 2018-07-21T05:35:20,263 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_217] response is [] 2018-07-21T05:35:20,263 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] starting 2018-07-21T05:35:20,263 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,263 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] Got response: 204 No Content 2018-07-21T05:35:20,263 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_338] response is [] 2018-07-21T05:35:20,263 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] starting 2018-07-21T05:35:20,264 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,264 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] Got response: 204 No Content 2018-07-21T05:35:20,264 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_459] response is [] 2018-07-21T05:35:20,264 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] starting 2018-07-21T05:35:20,264 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,264 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] Got response: 204 No Content 2018-07-21T05:35:20,264 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_131] response is [] 2018-07-21T05:35:20,265 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] starting 2018-07-21T05:35:20,265 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,265 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] Got response: 204 No Content 2018-07-21T05:35:20,265 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_216] response is [] 2018-07-21T05:35:20,265 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] starting 2018-07-21T05:35:20,266 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,266 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] Got response: 204 No Content 2018-07-21T05:35:20,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_337] response 
is [] 2018-07-21T05:35:20,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] starting 2018-07-21T05:35:20,266 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,266 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] Got response: 204 No Content 2018-07-21T05:35:20,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_458] response is [] 2018-07-21T05:35:20,266 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] starting 2018-07-21T05:35:20,267 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,267 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] Got response: 204 No Content 2018-07-21T05:35:20,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_219] response is [] 2018-07-21T05:35:20,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] starting 2018-07-21T05:35:20,267 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,267 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] Got response: 204 No Content 2018-07-21T05:35:20,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_218] response is [] 2018-07-21T05:35:20,267 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] starting 2018-07-21T05:35:20,268 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,268 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] Got response: 204 No Content 2018-07-21T05:35:20,268 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_339] response is [] 2018-07-21T05:35:20,268 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] starting 2018-07-21T05:35:20,269 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,269 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] Got response: 204 No Content 2018-07-21T05:35:20,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_134] response is [] 2018-07-21T05:35:20,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] starting 2018-07-21T05:35:20,269 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,269 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] Got response: 204 No Content 2018-07-21T05:35:20,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_135] response is [] 2018-07-21T05:35:20,269 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] starting 2018-07-21T05:35:20,270 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,270 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] Got response: 204 No Content 2018-07-21T05:35:20,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_132] response 
is [] 2018-07-21T05:35:20,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] starting 2018-07-21T05:35:20,270 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,270 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] Got response: 204 No Content 2018-07-21T05:35:20,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_133] response is [] 2018-07-21T05:35:20,270 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] starting 2018-07-21T05:35:20,271 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,271 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] Got response: 204 No Content 2018-07-21T05:35:20,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_138] response is [] 2018-07-21T05:35:20,271 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] starting 2018-07-21T05:35:20,271 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,272 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] Got response: 204 No Content 2018-07-21T05:35:20,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_139] response is [] 2018-07-21T05:35:20,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] starting 2018-07-21T05:35:20,272 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,272 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] Got response: 204 No Content 2018-07-21T05:35:20,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_136] response is [] 2018-07-21T05:35:20,272 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] starting 2018-07-21T05:35:20,273 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,273 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] Got response: 204 No Content 2018-07-21T05:35:20,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_137] response is [] 2018-07-21T05:35:20,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] starting 2018-07-21T05:35:20,273 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,273 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] Got response: 204 No Content 2018-07-21T05:35:20,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_460] response is [] 2018-07-21T05:35:20,273 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] starting 2018-07-21T05:35:20,274 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,274 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] Got response: 204 No Content 2018-07-21T05:35:20,274 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_220] 
response is [] 2018-07-21T05:35:20,274 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] starting 2018-07-21T05:35:20,275 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,275 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] Got response: 204 No Content 2018-07-21T05:35:20,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_341] response is [] 2018-07-21T05:35:20,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] starting 2018-07-21T05:35:20,275 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,275 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] Got response: 204 No Content 2018-07-21T05:35:20,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_462] response is [] 2018-07-21T05:35:20,275 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] starting 2018-07-21T05:35:20,276 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,276 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] Got response: 204 No Content 2018-07-21T05:35:20,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_340] response is [] 2018-07-21T05:35:20,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] starting 2018-07-21T05:35:20,276 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,276 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] Got response: 204 No Content 2018-07-21T05:35:20,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_461] response is [] 2018-07-21T05:35:20,276 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting 2018-07-21T05:35:20,277 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,277 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content 2018-07-21T05:35:20,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is [] 2018-07-21T05:35:20,277 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] starting 2018-07-21T05:35:20,278 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,278 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] Got response: 204 No Content 2018-07-21T05:35:20,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_222] response is [] 2018-07-21T05:35:20,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] starting 2018-07-21T05:35:20,278 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,278 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] Got response: 204 No Content 2018-07-21T05:35:20,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_343] response 
is [] 2018-07-21T05:35:20,278 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] starting 2018-07-21T05:35:20,279 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,279 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] Got response: 204 No Content 2018-07-21T05:35:20,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_464] response is [] 2018-07-21T05:35:20,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting 2018-07-21T05:35:20,279 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,279 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content 2018-07-21T05:35:20,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is [] 2018-07-21T05:35:20,279 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] starting 2018-07-21T05:35:20,279 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,280 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] Got response: 204 No Content 2018-07-21T05:35:20,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_221] response is [] 2018-07-21T05:35:20,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] starting 2018-07-21T05:35:20,280 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,280 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] Got response: 204 No Content 2018-07-21T05:35:20,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_342] response is [] 2018-07-21T05:35:20,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] starting 2018-07-21T05:35:20,280 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,280 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] Got response: 204 No Content 2018-07-21T05:35:20,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_463] response is [] 2018-07-21T05:35:20,280 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting 2018-07-21T05:35:20,281 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,281 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content 2018-07-21T05:35:20,281 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is [] 2018-07-21T05:35:20,281 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] starting 2018-07-21T05:35:20,281 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,281 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] Got response: 204 No Content 2018-07-21T05:35:20,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_224] response 
is [] 2018-07-21T05:35:20,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] starting 2018-07-21T05:35:20,282 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,282 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] Got response: 204 No Content 2018-07-21T05:35:20,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_345] response is [] 2018-07-21T05:35:20,282 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] starting 2018-07-21T05:35:20,283 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,283 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] Got response: 204 No Content 2018-07-21T05:35:20,283 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_466] response is [] 2018-07-21T05:35:20,283 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting 2018-07-21T05:35:20,283 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,283 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content 2018-07-21T05:35:20,283 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is [] 2018-07-21T05:35:20,283 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] starting 2018-07-21T05:35:20,284 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,284 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] Got response: 204 No Content 2018-07-21T05:35:20,284 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_223] response is [] 2018-07-21T05:35:20,284 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] starting 2018-07-21T05:35:20,284 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,284 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] Got response: 204 No Content 2018-07-21T05:35:20,284 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_344] response is [] 2018-07-21T05:35:20,284 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] starting 2018-07-21T05:35:20,285 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,285 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] Got response: 204 No Content 2018-07-21T05:35:20,285 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_465] response is [] 2018-07-21T05:35:20,285 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting 2018-07-21T05:35:20,286 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,286 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content 2018-07-21T05:35:20,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response 
is [] 2018-07-21T05:35:20,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] starting 2018-07-21T05:35:20,286 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,286 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] Got response: 204 No Content 2018-07-21T05:35:20,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_226] response is [] 2018-07-21T05:35:20,286 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] starting 2018-07-21T05:35:20,287 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,287 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] Got response: 204 No Content 2018-07-21T05:35:20,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_347] response is [] 2018-07-21T05:35:20,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] starting 2018-07-21T05:35:20,287 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,287 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] Got response: 204 No Content 2018-07-21T05:35:20,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_468] response is [] 2018-07-21T05:35:20,287 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting 2018-07-21T05:35:20,288 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,288 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content 2018-07-21T05:35:20,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is [] 2018-07-21T05:35:20,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] starting 2018-07-21T05:35:20,288 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,288 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] Got response: 204 No Content 2018-07-21T05:35:20,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_225] response is [] 2018-07-21T05:35:20,288 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] starting 2018-07-21T05:35:20,289 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,289 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] Got response: 204 No Content 2018-07-21T05:35:20,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_346] response is [] 2018-07-21T05:35:20,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] starting 2018-07-21T05:35:20,289 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,289 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] Got response: 204 No Content 2018-07-21T05:35:20,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_467] response 
is []
2018-07-21T05:35:20,289 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] starting
2018-07-21T05:35:20,290 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,290 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] Got response: 204 No Content
2018-07-21T05:35:20,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_206] response is []
2018-07-21T05:35:20,290 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] starting
2018-07-21T05:35:20,290 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,290 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] Got response: 204 No Content
2018-07-21T05:35:20,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_327] response is []
2018-07-21T05:35:20,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] starting
2018-07-21T05:35:20,291 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,291 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] Got response: 204 No Content
2018-07-21T05:35:20,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_448] response is []
2018-07-21T05:35:20,291 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] starting
2018-07-21T05:35:20,291 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,292 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] Got response: 204 No Content
2018-07-21T05:35:20,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_120] response is []
2018-07-21T05:35:20,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] starting
2018-07-21T05:35:20,292 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,292 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] Got response: 204 No Content
2018-07-21T05:35:20,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_205] response is []
2018-07-21T05:35:20,292 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] starting
2018-07-21T05:35:20,293 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,293 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] Got response: 204 No Content
2018-07-21T05:35:20,293 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_326] response is []
2018-07-21T05:35:20,293 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] starting
2018-07-21T05:35:20,293 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,293 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] Got response: 204 No Content
2018-07-21T05:35:20,293 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_447] response is []
2018-07-21T05:35:20,293 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] starting
2018-07-21T05:35:20,294 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,294 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] Got response: 204 No Content
2018-07-21T05:35:20,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_208] response is []
2018-07-21T05:35:20,294 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] starting
2018-07-21T05:35:20,295 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,295 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] Got response: 204 No Content
2018-07-21T05:35:20,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_329] response is []
2018-07-21T05:35:20,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] starting
2018-07-21T05:35:20,295 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,295 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] Got response: 204 No Content
2018-07-21T05:35:20,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_207] response is []
2018-07-21T05:35:20,295 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] starting
2018-07-21T05:35:20,296 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,296 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] Got response: 204 No Content
2018-07-21T05:35:20,296 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_328] response is []
2018-07-21T05:35:20,296 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] starting
2018-07-21T05:35:20,296 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,296 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] Got response: 204 No Content
2018-07-21T05:35:20,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_449] response is []
2018-07-21T05:35:20,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] starting
2018-07-21T05:35:20,297 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,297 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] Got response: 204 No Content
2018-07-21T05:35:20,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_123] response is []
2018-07-21T05:35:20,297 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] starting
2018-07-21T05:35:20,298 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,298 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] Got response: 204 No Content
2018-07-21T05:35:20,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_124] response is []
2018-07-21T05:35:20,298 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] starting
2018-07-21T05:35:20,298 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,299 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] Got response: 204 No Content
2018-07-21T05:35:20,299 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_209] response is []
2018-07-21T05:35:20,299 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] starting
2018-07-21T05:35:20,299 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,299 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] Got response: 204 No Content
2018-07-21T05:35:20,299 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_121] response is []
2018-07-21T05:35:20,299 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] starting
2018-07-21T05:35:20,300 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,300 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] Got response: 204 No Content
2018-07-21T05:35:20,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_122] response is []
2018-07-21T05:35:20,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] starting
2018-07-21T05:35:20,300 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,300 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] Got response: 204 No Content
2018-07-21T05:35:20,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_127] response is []
2018-07-21T05:35:20,300 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] starting
2018-07-21T05:35:20,301 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,301 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] Got response: 204 No Content
2018-07-21T05:35:20,301 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_128] response is []
2018-07-21T05:35:20,301 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] starting
2018-07-21T05:35:20,302 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,302 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] Got response: 204 No Content
2018-07-21T05:35:20,302 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_125] response is []
2018-07-21T05:35:20,302 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] starting
2018-07-21T05:35:20,302 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,302 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] Got response: 204 No Content
2018-07-21T05:35:20,302 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_126] response is []
2018-07-21T05:35:20,302 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] starting
2018-07-21T05:35:20,303 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,303 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] Got response: 204 No Content
2018-07-21T05:35:20,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_129] response is []
2018-07-21T05:35:20,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] starting
2018-07-21T05:35:20,303 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,303 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] Got response: 204 No Content
2018-07-21T05:35:20,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_330] response is []
2018-07-21T05:35:20,303 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] starting
2018-07-21T05:35:20,304 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,304 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] Got response: 204 No Content
2018-07-21T05:35:20,304 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_451] response is []
2018-07-21T05:35:20,304 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] starting
2018-07-21T05:35:20,304 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,304 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] Got response: 204 No Content
2018-07-21T05:35:20,304 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_450] response is []
2018-07-21T05:35:20,304 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] starting
2018-07-21T05:35:20,305 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,305 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] Got response: 204 No Content
2018-07-21T05:35:20,305 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_211] response is []
2018-07-21T05:35:20,305 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] starting
2018-07-21T05:35:20,305 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,305 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] Got response: 204 No Content
2018-07-21T05:35:20,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_332] response is []
2018-07-21T05:35:20,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] starting
2018-07-21T05:35:20,306 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,306 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] Got response: 204 No Content
2018-07-21T05:35:20,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_453] response is []
2018-07-21T05:35:20,306 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] starting
2018-07-21T05:35:20,307 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,307 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] Got response: 204 No Content
2018-07-21T05:35:20,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_210] response is []
2018-07-21T05:35:20,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] starting
2018-07-21T05:35:20,307 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,307 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] Got response: 204 No Content
2018-07-21T05:35:20,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_331] response is []
2018-07-21T05:35:20,307 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] starting
2018-07-21T05:35:20,308 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,308 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] Got response: 204 No Content
2018-07-21T05:35:20,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_452] response is []
2018-07-21T05:35:20,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] starting
2018-07-21T05:35:20,308 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,308 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] Got response: 204 No Content
2018-07-21T05:35:20,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_213] response is []
2018-07-21T05:35:20,308 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] starting
2018-07-21T05:35:20,309 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,309 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] Got response: 204 No Content
2018-07-21T05:35:20,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_334] response is []
2018-07-21T05:35:20,309 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] starting
2018-07-21T05:35:20,310 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,310 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] Got response: 204 No Content
2018-07-21T05:35:20,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_455] response is []
2018-07-21T05:35:20,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] starting
2018-07-21T05:35:20,310 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,310 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] Got response: 204 No Content
2018-07-21T05:35:20,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_212] response is []
2018-07-21T05:35:20,310 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] starting
2018-07-21T05:35:20,311 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,311 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] Got response: 204 No Content
2018-07-21T05:35:20,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_333] response is []
2018-07-21T05:35:20,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] starting
2018-07-21T05:35:20,311 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,311 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] Got response: 204 No Content
2018-07-21T05:35:20,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_454] response is []
2018-07-21T05:35:20,311 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] starting
2018-07-21T05:35:20,312 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,312 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] Got response: 204 No Content
2018-07-21T05:35:20,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_215] response is []
2018-07-21T05:35:20,312 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] starting
2018-07-21T05:35:20,313 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,313 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] Got response: 204 No Content
2018-07-21T05:35:20,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_336] response is []
2018-07-21T05:35:20,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] starting
2018-07-21T05:35:20,313 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,313 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] Got response: 204 No Content
2018-07-21T05:35:20,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_457] response is []
2018-07-21T05:35:20,313 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] starting
2018-07-21T05:35:20,314 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,314 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] Got response: 204 No Content
2018-07-21T05:35:20,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_214] response is []
2018-07-21T05:35:20,314 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] starting
2018-07-21T05:35:20,314 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,314 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] Got response: 204 No Content
2018-07-21T05:35:20,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_335] response is []
2018-07-21T05:35:20,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] starting
2018-07-21T05:35:20,315 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,315 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] Got response: 204 No Content
2018-07-21T05:35:20,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_456] response is []
2018-07-21T05:35:20,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] starting
2018-07-21T05:35:20,315 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,315 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] Got response: 204 No Content
2018-07-21T05:35:20,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_316] response is []
2018-07-21T05:35:20,315 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] starting
2018-07-21T05:35:20,316 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,316 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] Got response: 204 No Content
2018-07-21T05:35:20,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_437] response is []
2018-07-21T05:35:20,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] starting
2018-07-21T05:35:20,316 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,316 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] Got response: 204 No Content
2018-07-21T05:35:20,316 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_315] response is []
2018-07-21T05:35:20,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] starting
2018-07-21T05:35:20,317 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,317 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] Got response: 204 No Content
2018-07-21T05:35:20,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_436] response is []
2018-07-21T05:35:20,317 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] starting
2018-07-21T05:35:20,318 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,318 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] Got response: 204 No Content
2018-07-21T05:35:20,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_318] response is []
2018-07-21T05:35:20,318 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] starting
2018-07-21T05:35:20,318 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,318 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439] Got response: 204 No Content
2018-07-21T05:35:20,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_439]
response is [] 2018-07-21T05:35:20,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] starting 2018-07-21T05:35:20,319 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,319 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] Got response: 204 No Content 2018-07-21T05:35:20,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_317] response is [] 2018-07-21T05:35:20,319 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] starting 2018-07-21T05:35:20,320 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,320 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] Got response: 204 No Content 2018-07-21T05:35:20,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_438] response is [] 2018-07-21T05:35:20,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] starting 2018-07-21T05:35:20,320 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,320 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] Got response: 204 No Content 2018-07-21T05:35:20,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_112] response is [] 2018-07-21T05:35:20,320 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] starting 2018-07-21T05:35:20,321 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,321 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] Got response: 204 No Content 2018-07-21T05:35:20,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_113] response is [] 2018-07-21T05:35:20,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] starting 2018-07-21T05:35:20,321 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,321 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] Got response: 204 No Content 2018-07-21T05:35:20,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_319] response is [] 2018-07-21T05:35:20,321 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] starting 2018-07-21T05:35:20,322 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,322 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] Got response: 204 No Content 2018-07-21T05:35:20,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_110] response is [] 2018-07-21T05:35:20,322 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] starting 2018-07-21T05:35:20,323 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,323 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] Got response: 204 No Content 2018-07-21T05:35:20,324 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_111] response 
is [] 2018-07-21T05:35:20,324 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] starting 2018-07-21T05:35:20,325 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,325 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] Got response: 204 No Content 2018-07-21T05:35:20,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_116] response is [] 2018-07-21T05:35:20,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] starting 2018-07-21T05:35:20,325 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,325 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] Got response: 204 No Content 2018-07-21T05:35:20,325 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_117] response is [] 2018-07-21T05:35:20,326 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] starting 2018-07-21T05:35:20,326 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,326 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] Got response: 204 No Content 2018-07-21T05:35:20,326 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_114] response is [] 2018-07-21T05:35:20,326 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] starting 2018-07-21T05:35:20,327 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,327 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] Got response: 204 No Content 2018-07-21T05:35:20,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_115] response is [] 2018-07-21T05:35:20,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] starting 2018-07-21T05:35:20,327 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,327 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] Got response: 204 No Content 2018-07-21T05:35:20,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_118] response is [] 2018-07-21T05:35:20,327 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] starting 2018-07-21T05:35:20,328 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,328 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] Got response: 204 No Content 2018-07-21T05:35:20,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_119] response is [] 2018-07-21T05:35:20,328 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] starting 2018-07-21T05:35:20,329 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,329 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] Got response: 204 No Content 2018-07-21T05:35:20,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_440] response 
is [] 2018-07-21T05:35:20,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] starting 2018-07-21T05:35:20,329 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,329 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] Got response: 204 No Content 2018-07-21T05:35:20,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_200] response is [] 2018-07-21T05:35:20,329 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] starting 2018-07-21T05:35:20,330 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,330 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] Got response: 204 No Content 2018-07-21T05:35:20,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_321] response is [] 2018-07-21T05:35:20,330 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] starting 2018-07-21T05:35:20,331 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,331 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] Got response: 204 No Content 2018-07-21T05:35:20,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_442] response is [] 2018-07-21T05:35:20,331 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] starting 2018-07-21T05:35:20,332 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,332 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] Got response: 204 No Content 2018-07-21T05:35:20,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_320] response is [] 2018-07-21T05:35:20,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] starting 2018-07-21T05:35:20,332 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,332 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] Got response: 204 No Content 2018-07-21T05:35:20,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_441] response is [] 2018-07-21T05:35:20,332 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] starting 2018-07-21T05:35:20,333 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,333 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] Got response: 204 No Content 2018-07-21T05:35:20,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_202] response is [] 2018-07-21T05:35:20,333 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] starting 2018-07-21T05:35:20,334 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,334 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] Got response: 204 No Content 2018-07-21T05:35:20,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_323] 
response is [] 2018-07-21T05:35:20,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] starting 2018-07-21T05:35:20,334 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,334 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] Got response: 204 No Content 2018-07-21T05:35:20,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_444] response is [] 2018-07-21T05:35:20,334 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] starting 2018-07-21T05:35:20,335 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,335 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] Got response: 204 No Content 2018-07-21T05:35:20,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_201] response is [] 2018-07-21T05:35:20,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] starting 2018-07-21T05:35:20,335 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,335 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] Got response: 204 No Content 2018-07-21T05:35:20,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_322] response is [] 2018-07-21T05:35:20,335 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] starting 2018-07-21T05:35:20,336 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,336 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] Got response: 204 No Content 2018-07-21T05:35:20,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_443] response is [] 2018-07-21T05:35:20,336 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] starting 2018-07-21T05:35:20,336 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,336 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] Got response: 204 No Content 2018-07-21T05:35:20,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_204] response is [] 2018-07-21T05:35:20,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] starting 2018-07-21T05:35:20,337 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,337 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] Got response: 204 No Content 2018-07-21T05:35:20,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_325] response is [] 2018-07-21T05:35:20,337 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] starting 2018-07-21T05:35:20,338 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,338 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] Got response: 204 No Content 2018-07-21T05:35:20,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_446] response 
is [] 2018-07-21T05:35:20,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] starting 2018-07-21T05:35:20,338 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,338 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] Got response: 204 No Content 2018-07-21T05:35:20,338 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_203] response is [] 2018-07-21T05:35:20,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] starting 2018-07-21T05:35:20,339 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,339 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] Got response: 204 No Content 2018-07-21T05:35:20,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_324] response is [] 2018-07-21T05:35:20,339 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] starting 2018-07-21T05:35:20,340 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,340 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] Got response: 204 No Content
2018-07-21T05:35:20,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_445] response is []
2018-07-21T05:35:20,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] starting
2018-07-21T05:35:20,340 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,340 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] Got response: 204 No Content
2018-07-21T05:35:20,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_305] response is []
2018-07-21T05:35:20,340 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] starting
2018-07-21T05:35:20,341 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,341 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] Got response: 204 No Content
2018-07-21T05:35:20,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_426] response is []
2018-07-21T05:35:20,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] starting
2018-07-21T05:35:20,341 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,341 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] Got response: 204 No Content
2018-07-21T05:35:20,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_304] response is []
2018-07-21T05:35:20,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] starting
2018-07-21T05:35:20,342 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,342 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] Got response: 204 No Content
2018-07-21T05:35:20,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_425] response is []
2018-07-21T05:35:20,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] starting
2018-07-21T05:35:20,342 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,342 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] Got response: 204 No Content
2018-07-21T05:35:20,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_307] response is []
2018-07-21T05:35:20,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] starting
2018-07-21T05:35:20,343 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,343 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] Got response: 204 No Content
2018-07-21T05:35:20,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_428] response is []
2018-07-21T05:35:20,343 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] starting
2018-07-21T05:35:20,343 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,343 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] Got response: 204 No Content
2018-07-21T05:35:20,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_306] response is []
2018-07-21T05:35:20,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] starting
2018-07-21T05:35:20,344 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,344 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] Got response: 204 No Content
2018-07-21T05:35:20,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_427] response is []
2018-07-21T05:35:20,344 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] starting
2018-07-21T05:35:20,345 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,345 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] Got response: 204 No Content
2018-07-21T05:35:20,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_101] response is []
2018-07-21T05:35:20,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] starting
2018-07-21T05:35:20,345 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,345 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] Got response: 204 No Content
2018-07-21T05:35:20,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_309] response is []
2018-07-21T05:35:20,345 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] starting
2018-07-21T05:35:20,346 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,346 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] Got response: 204 No Content
2018-07-21T05:35:20,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_102] response is []
2018-07-21T05:35:20,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] starting
2018-07-21T05:35:20,346 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,346 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] Got response: 204 No Content
2018-07-21T05:35:20,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_308] response is []
2018-07-21T05:35:20,346 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] starting
2018-07-21T05:35:20,347 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,347 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] Got response: 204 No Content
2018-07-21T05:35:20,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_429] response is []
2018-07-21T05:35:20,347 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] starting
2018-07-21T05:35:20,348 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,348 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] Got response: 204 No Content
2018-07-21T05:35:20,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_100] response is []
2018-07-21T05:35:20,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] starting
2018-07-21T05:35:20,348 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,348 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] Got response: 204 No Content
2018-07-21T05:35:20,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_105] response is []
2018-07-21T05:35:20,348 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] starting
2018-07-21T05:35:20,349 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,349 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] Got response: 204 No Content
2018-07-21T05:35:20,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_106] response is []
2018-07-21T05:35:20,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] starting
2018-07-21T05:35:20,350 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,350 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] Got response: 204 No Content
2018-07-21T05:35:20,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_103] response is []
2018-07-21T05:35:20,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] starting
2018-07-21T05:35:20,350 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,350 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] Got response: 204 No Content
2018-07-21T05:35:20,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_104] response is []
2018-07-21T05:35:20,350 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] starting
2018-07-21T05:35:20,351 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,351 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] Got response: 204 No Content
2018-07-21T05:35:20,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_109] response is []
2018-07-21T05:35:20,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] starting
2018-07-21T05:35:20,351 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,351 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] Got response: 204 No Content
2018-07-21T05:35:20,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_107] response is []
2018-07-21T05:35:20,351 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] starting
2018-07-21T05:35:20,352 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,352 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] Got response: 204 No Content
2018-07-21T05:35:20,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_108] response is []
2018-07-21T05:35:20,352 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] starting
2018-07-21T05:35:20,353 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,353 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] Got response: 204 No Content
2018-07-21T05:35:20,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_310] response is []
2018-07-21T05:35:20,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] starting
2018-07-21T05:35:20,353 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,353 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] Got response: 204 No Content
2018-07-21T05:35:20,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_431] response is []
2018-07-21T05:35:20,353 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] starting
2018-07-21T05:35:20,354 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,354 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] Got response: 204 No Content
2018-07-21T05:35:20,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_430] response is []
2018-07-21T05:35:20,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] starting
2018-07-21T05:35:20,354 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,354 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] Got response: 204 No Content
2018-07-21T05:35:20,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_312] response is []
2018-07-21T05:35:20,354 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] starting
2018-07-21T05:35:20,355 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,355 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] Got response: 204 No Content
2018-07-21T05:35:20,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_433] response is []
2018-07-21T05:35:20,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] starting
2018-07-21T05:35:20,355 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,355 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] Got response: 204 No Content
2018-07-21T05:35:20,355 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_311] response is []
2018-07-21T05:35:20,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] starting
2018-07-21T05:35:20,356 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,356 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] Got response: 204 No Content
2018-07-21T05:35:20,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_432] response is []
2018-07-21T05:35:20,356 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] starting
2018-07-21T05:35:20,357 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,357 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] Got response: 204 No Content
2018-07-21T05:35:20,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_314] response is []
2018-07-21T05:35:20,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] starting
2018-07-21T05:35:20,357 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,357 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] Got response: 204 No Content
2018-07-21T05:35:20,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_435] response is []
2018-07-21T05:35:20,357 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] starting
2018-07-21T05:35:20,358 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,358 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] Got response: 204 No Content
2018-07-21T05:35:20,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_313] response is []
2018-07-21T05:35:20,358 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] starting
2018-07-21T05:35:20,359 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,359 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] Got response: 204 No Content
2018-07-21T05:35:20,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_434] response is []
2018-07-21T05:35:20,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] starting
2018-07-21T05:35:20,359 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,359 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] Got response: 204 No Content
2018-07-21T05:35:20,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_5] response is []
2018-07-21T05:35:20,359 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] starting
2018-07-21T05:35:20,360 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,360 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] Got response: 204 No Content
2018-07-21T05:35:20,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_4] response is []
2018-07-21T05:35:20,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] starting
2018-07-21T05:35:20,360 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,360 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] Got response: 204 No Content
2018-07-21T05:35:20,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_7] response is []
2018-07-21T05:35:20,360 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] starting
2018-07-21T05:35:20,361 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,361 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] Got response: 204 No Content
2018-07-21T05:35:20,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_6] response is []
2018-07-21T05:35:20,361 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] starting
2018-07-21T05:35:20,362 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,362 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] Got response: 204 No Content
2018-07-21T05:35:20,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_9] response is []
2018-07-21T05:35:20,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] starting
2018-07-21T05:35:20,362 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,362 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] Got response: 204 No Content
2018-07-21T05:35:20,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_8] response is []
2018-07-21T05:35:20,362 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] starting
2018-07-21T05:35:20,363 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,363 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] Got response: 204 No Content
2018-07-21T05:35:20,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_1] response is []
2018-07-21T05:35:20,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] starting
2018-07-21T05:35:20,363 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,363 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] Got response: 204 No Content
2018-07-21T05:35:20,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_17] response is []
2018-07-21T05:35:20,363 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] starting
2018-07-21T05:35:20,364 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,364 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] Got response: 204 No Content
2018-07-21T05:35:20,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_16] response is []
2018-07-21T05:35:20,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] starting
2018-07-21T05:35:20,364 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,364 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] Got response: 204 No Content
2018-07-21T05:35:20,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_3] response is []
2018-07-21T05:35:20,364 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] starting
2018-07-21T05:35:20,365 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,365 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] Got response: 204 No Content
2018-07-21T05:35:20,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_19] response is []
2018-07-21T05:35:20,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] starting
2018-07-21T05:35:20,365 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,365 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] Got response: 204 No Content
2018-07-21T05:35:20,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1969-12-31T23:00:00.000Z_1970-01-01T00:00:00.000Z_2018-07-21T05:31:59.547-07:00_2] response is []
2018-07-21T05:35:20,365 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] starting
2018-07-21T05:35:20,366 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,366 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] Got response: 204 No Content
2018-07-21T05:35:20,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_18] response is []
2018-07-21T05:35:20,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] starting
2018-07-21T05:35:20,366 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,366 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] Got response: 204 No Content
2018-07-21T05:35:20,366 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_13] response is []
2018-07-21T05:35:20,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] starting
2018-07-21T05:35:20,367 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,367 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] Got response: 204 No Content
2018-07-21T05:35:20,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_12] response is []
2018-07-21T05:35:20,367 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] starting
2018-07-21T05:35:20,368 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT)
2018-07-21T05:35:20,368 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] Got response: 204 No Content
2018-07-21T05:35:20,368 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_15] response is []
2018-07-21T05:35:20,368 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] starting
2018-07-21T05:35:20,368 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,368 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] Got response: 204 No Content 2018-07-21T05:35:20,368 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_14] response is [] 2018-07-21T05:35:20,368 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] starting 2018-07-21T05:35:20,369 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,369 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] Got response: 204 No Content 2018-07-21T05:35:20,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_11] response is [] 2018-07-21T05:35:20,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] starting 2018-07-21T05:35:20,369 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,369 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] Got response: 204 No Content 2018-07-21T05:35:20,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_10] response is [] 2018-07-21T05:35:20,369 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] starting 2018-07-21T05:35:20,370 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,370 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] Got response: 204 No Content 2018-07-21T05:35:20,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_190] response is [] 2018-07-21T05:35:20,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] starting 2018-07-21T05:35:20,370 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,370 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] Got response: 204 No Content 2018-07-21T05:35:20,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_192] response 
is [] 2018-07-21T05:35:20,370 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] starting 2018-07-21T05:35:20,371 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,371 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] Got response: 204 No Content 2018-07-21T05:35:20,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_191] response is [] 2018-07-21T05:35:20,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] starting 2018-07-21T05:35:20,371 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,371 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] Got response: 204 No Content 2018-07-21T05:35:20,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_194] response is [] 2018-07-21T05:35:20,371 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] starting 2018-07-21T05:35:20,372 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,372 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] Got response: 204 No Content 2018-07-21T05:35:20,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_193] response is [] 2018-07-21T05:35:20,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] starting 2018-07-21T05:35:20,372 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,372 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] Got response: 204 No Content 2018-07-21T05:35:20,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_196] response is [] 2018-07-21T05:35:20,372 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] starting 2018-07-21T05:35:20,373 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,373 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] Got response: 204 No Content 2018-07-21T05:35:20,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_195] response is [] 2018-07-21T05:35:20,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] starting 2018-07-21T05:35:20,373 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,373 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] Got response: 204 No Content 2018-07-21T05:35:20,373 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_198] response is [] 2018-07-21T05:35:20,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] starting 2018-07-21T05:35:20,374 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,374 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] Got response: 204 No Content 2018-07-21T05:35:20,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_197] response 
is [] 2018-07-21T05:35:20,374 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] starting 2018-07-21T05:35:20,376 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,376 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] Got response: 204 No Content 2018-07-21T05:35:20,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_199] response is [] 2018-07-21T05:35:20,376 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] starting 2018-07-21T05:35:20,377 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,377 DEBUG [HttpClient-Netty-Worker-5] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] Got response: 204 No Content 2018-07-21T05:35:20,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_57] response is [] 2018-07-21T05:35:20,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] starting 2018-07-21T05:35:20,377 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,377 DEBUG [HttpClient-Netty-Worker-6] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] Got response: 204 No Content 2018-07-21T05:35:20,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_56] response is [] 2018-07-21T05:35:20,377 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] starting 2018-07-21T05:35:20,378 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,378 DEBUG [HttpClient-Netty-Worker-7] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] Got response: 204 No Content 2018-07-21T05:35:20,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_59] response is [] 2018-07-21T05:35:20,378 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] starting 2018-07-21T05:35:20,379 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,379 DEBUG [HttpClient-Netty-Worker-8] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] Got response: 204 No Content 2018-07-21T05:35:20,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_58] response is [] 2018-07-21T05:35:20,379 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] starting 2018-07-21T05:35:20,380 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,380 DEBUG [HttpClient-Netty-Worker-9] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] Got response: 204 No Content 2018-07-21T05:35:20,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_53] response is [] 2018-07-21T05:35:20,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] starting 2018-07-21T05:35:20,380 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,380 DEBUG [HttpClient-Netty-Worker-10] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] Got response: 204 No Content 2018-07-21T05:35:20,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_52] response is [] 
2018-07-21T05:35:20,380 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] starting 2018-07-21T05:35:20,381 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,381 DEBUG [HttpClient-Netty-Worker-11] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] Got response: 204 No Content 2018-07-21T05:35:20,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_55] response is [] 2018-07-21T05:35:20,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] starting 2018-07-21T05:35:20,381 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,381 DEBUG [HttpClient-Netty-Worker-12] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] Got response: 204 No Content 2018-07-21T05:35:20,381 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_54] response is [] 2018-07-21T05:35:20,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] starting 2018-07-21T05:35:20,382 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,382 DEBUG [HttpClient-Netty-Worker-13] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] Got response: 204 No Content 2018-07-21T05:35:20,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_51] response is [] 2018-07-21T05:35:20,382 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] starting 2018-07-21T05:35:20,383 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,383 DEBUG [HttpClient-Netty-Worker-14] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] Got response: 204 No Content 2018-07-21T05:35:20,383 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_50] response is [] 2018-07-21T05:35:20,383 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] starting 2018-07-21T05:35:20,383 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,383 DEBUG [HttpClient-Netty-Worker-15] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] Got response: 204 No Content 2018-07-21T05:35:20,383 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_49] response is [] 2018-07-21T05:35:20,383 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] starting 2018-07-21T05:35:20,384 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,384 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] Got response: 204 No Content 2018-07-21T05:35:20,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_46] response is [] 2018-07-21T05:35:20,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] starting 2018-07-21T05:35:20,384 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,384 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] Got response: 204 No Content 2018-07-21T05:35:20,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_45] response is [] 
2018-07-21T05:35:20,384 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] starting 2018-07-21T05:35:20,385 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,385 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] Got response: 204 No Content 2018-07-21T05:35:20,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_48] response is [] 2018-07-21T05:35:20,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] starting 2018-07-21T05:35:20,385 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,385 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] Got response: 204 No Content 2018-07-21T05:35:20,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_47] response is [] 2018-07-21T05:35:20,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] starting 2018-07-21T05:35:20,386 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,386 DEBUG [HttpClient-Netty-Worker-0] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] Got response: 204 No Content 2018-07-21T05:35:20,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_42] response is [] 2018-07-21T05:35:20,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] starting 2018-07-21T05:35:20,386 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,386 DEBUG [HttpClient-Netty-Worker-1] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] Got response: 204 No Content 2018-07-21T05:35:20,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_41] response is [] 2018-07-21T05:35:20,386 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] starting 2018-07-21T05:35:20,387 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,387 DEBUG [HttpClient-Netty-Worker-2] client.NettyHttpClient: [GET 
http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] Got response: 204 No Content 2018-07-21T05:35:20,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44] response is [] 2018-07-21T05:35:20,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] starting 2018-07-21T05:35:20,387 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,387 DEBUG [HttpClient-Netty-Worker-3] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] Got response: 204 No Content 2018-07-21T05:35:20,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_43] response is [] 2018-07-21T05:35:20,387 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] starting 2018-07-21T05:35:20,388 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] messageReceived: DefaultHttpResponse(chunked: false) HTTP/1.1 204 No Content Date: Sat, 21 Jul 2018 12:35:20 GMT Content-Type: application/json Server: Jetty(9.3.z-SNAPSHOT) 2018-07-21T05:35:20,388 DEBUG [HttpClient-Netty-Worker-4] client.NettyHttpClient: [GET http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] Got response: 204 No Content 2018-07-21T05:35:20,388 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Checking segment [http://localhost:8081/druid/coordinator/v1/datasources/default.druid_max_size_partition/segments/default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_40] response is [] 
[ ... 20 analogous check cycles elided: the same GET -> "204 No Content" -> "response is []" sequence repeats for segments _39, _38, _35, _34, _37, _36, _31, _30, _33, _32, _28, _27, _29, _24, _23, _26, _25, _20, _22 and _21 ... ]
2018-07-21T05:35:49,240 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:35:49,302 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:35:50,401 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Wait time exhausted and we have [634] out of [634] segments not loaded yet
2018-07-21T05:35:50,421 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,421 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,431 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
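The ERROR above is the end of a polling loop: after the CTAS hands its segments to Druid, DruidStorageHandler repeatedly GETs each segment's coordinator endpoint and treats a "204 No Content" with an empty body as "not loaded yet", until a wait budget runs out; here all 634 segments were still unloaded when the budget expired. A minimal sketch of that pattern using java.net.http; the endpoint shape and the empty-body convention come from the log, while the class name, wait budget and poll interval are illustrative, not Hive's actual implementation:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

public class SegmentLoadWaiter {
    private static final String COORDINATOR = "http://localhost:8081";

    public static void main(String[] args) throws Exception {
        HttpClient http = HttpClient.newHttpClient();
        // Segment ids as they appear in the coordinator URLs above (one shown).
        List<String> pending = new ArrayList<>(List.of(
            "default.druid_max_size_partition_1970-01-01T00:00:00.000Z_1970-01-01T01:00:00.000Z_2018-07-21T05:31:59.547-07:00_44"));
        int total = pending.size();
        Instant deadline = Instant.now().plus(Duration.ofSeconds(30)); // assumed wait budget

        while (!pending.isEmpty() && Instant.now().isBefore(deadline)) {
            pending.removeIf(id -> isLoaded(http, "default.druid_max_size_partition", id));
            if (!pending.isEmpty()) Thread.sleep(1000); // assumed poll interval
        }
        if (!pending.isEmpty()) {
            // Mirrors the log line: "Wait time exhausted and we have [N] out of [M] segments not loaded yet"
            System.err.printf("Wait time exhausted and we have [%d] out of [%d] segments not loaded yet%n",
                    pending.size(), total);
        }
    }

    // A segment counts as loaded once the coordinator returns a non-empty body;
    // the 204/empty responses in the log mean the coordinator does not serve it yet.
    static boolean isLoaded(HttpClient http, String dataSource, String segmentId) {
        try {
            HttpRequest req = HttpRequest.newBuilder(URI.create(
                    COORDINATOR + "/druid/coordinator/v1/datasources/" + dataSource
                    + "/segments/" + segmentId)).GET().build();
            HttpResponse<String> resp = http.send(req, HttpResponse.BodyHandlers.ofString());
            return resp.statusCode() == 200 && !resp.body().isEmpty();
        } catch (Exception e) {
            return false;
        }
    }
}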
2018-07-21T05:35:50,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:35:50,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Ignoring request to add WriteEntity(default@druid_max_size_partition) Type=TABLE WriteType=DDL_NO_LOCK because WriteEntity(default@druid_max_size_partition) Type=TABLE WriteType=DDL_NO_LOCK is present
2018-07-21T05:35:50,432 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-3:STATS] in serial mode
2018-07-21T05:35:50,432 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@31f656e6, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:35:50,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:35:50,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,433 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:35:50,433 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:35:50,433 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:35:50,433 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:35:50,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,433 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:35:50,434 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,434 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:35:50,434 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:35:50,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,435 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,437 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:35:50,438 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,439 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:35:50,439 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,439 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,439 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,446 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Executing stats task
2018-07-21T05:35:50,446 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003
2018-07-21T05:35:50,450 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003
2018-07-21T05:35:50,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Part ID: default.druid_max_size_partition/, numRows
2018-07-21T05:35:50,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Read stats for default.druid_max_size_partition/, numRows, 9173:
2018-07-21T05:35:50,455 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Part ID: default.druid_max_size_partition/, rawDataSize
2018-07-21T05:35:50,455 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: Read stats for default.druid_max_size_partition/, rawDataSize, 0:
2018-07-21T05:35:50,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:35:50,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:35:50,456 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,456 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: alter_table: hive.default.druid_max_size_partition newtbl=druid_max_size_partition
2018-07-21T05:35:50,456 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=alter_table: hive.default.druid_max_size_partition newtbl=druid_max_size_partition
2018-07-21T05:35:50,463 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,475 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,476 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStatsTask: Table default.druid_max_size_partition stats: [numFiles=0, numRows=9173, totalSize=0, rawDataSize=0, numFilesErasureCoded=0]
2018-07-21T05:35:50,476 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsAggregator: About to delete stats tmp dir :hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1/-ext-10003
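Stage-3 above is basic stats collection: writer tasks published counters (numRows, rawDataSize) as files under the staging dir -ext-10003, BasicStatsTask sums every value matching the partition prefix, stores the result on the table (numRows=9173, totalSize=0 since the data lives in Druid, not HDFS), and the tmp dir is then deleted. A rough sketch of that aggregation step under an assumed one-counter-per-line file layout; the class and the tab-separated format are illustrative, not Hive's actual FSStatsPublisher/FSStatsAggregator code (which also goes through the Hadoop FileSystem API rather than java.nio):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;

public class FsStatsAggregatorSketch {
    // Each line in a stats file is assumed to be: <partPrefix>\t<statName>\t<value>
    static Map<String, Long> aggregate(Path statsDir, String partPrefix) throws IOException {
        Map<String, Long> totals = new HashMap<>();
        try (Stream<Path> files = Files.list(statsDir)) {
            for (Path f : (Iterable<Path>) files::iterator) {
                for (String line : Files.readAllLines(f)) {
                    String[] parts = line.split("\t");
                    if (parts.length == 3 && parts[0].startsWith(partPrefix)) {
                        // Sum each counter across all task files for this prefix.
                        totals.merge(parts[1], Long.parseLong(parts[2]), Long::sum);
                    }
                }
            }
        }
        return totals; // e.g. {numRows=9173, rawDataSize=0}, as read in the log above
    }
}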
2018-07-21T05:35:50,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,477 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,477 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:35:50,477 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: CREATETABLE_AS_SELECT
2018-07-21T05:35:50,477 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@alltypesorc
2018-07-21T05:35:50,477 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: database:default
2018-07-21T05:35:50,477 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: default@druid_max_size_partition
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.__time EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
2018-07-21T05:35:50,478 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Lineage: druid_max_size_partition.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
2018-07-21T05:35:50,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,478 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,481 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 7
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,481 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {alter_table_with_environmentContext_(String, String, Table, EnvironmentContext, )=20, getTable_(String, String, )=7}
2018-07-21T05:35:50,481 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721053159_6582869f-0da3-4041-9795-5a01e96e7cb3); Time taken: 230.901 seconds
2018-07-21T05:35:50,481 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,481 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE" ) AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
2018-07-21T05:35:50,481 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-31-59_445_1971394732788950030-1
2018-07-21T05:35:50,482 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-31-59_445_1971394732788950030-1
2018-07-21T05:35:50,482 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 231.036 seconds
2018-07-21T05:35:50,482 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:35:50,482 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
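At this point the CTAS is done (231 s wall clock, most of it spent in the failed segment-load wait) and the harness moves on to the follow-up aggregate compiled below. For reference, the same pair of statements can be replayed against a HiveServer2 endpoint over JDBC; the SQL strings are taken from the log, while the connection URL is an assumption, since this test runs an embedded driver rather than a server:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DruidCtasReplay {
    public static void main(String[] args) throws Exception {
        // CTAS from the POSTHOOK record above, reflowed into one string.
        String ctas = "CREATE EXTERNAL TABLE druid_max_size_partition "
                + "STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' "
                + "TBLPROPERTIES (\"druid.segment.granularity\" = \"HOUR\", "
                + "\"druid.query.granularity\" = \"MINUTE\") "
                + "AS SELECT cast(`ctimestamp1` as timestamp with local time zone) as `__time`, "
                + "cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, "
                + "cbigint, cboolean1, cboolean2 FROM alltypesorc WHERE ctimestamp1 IS NOT NULL";
        // Assumed HiveServer2 URL; requires the hive-jdbc driver on the classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            stmt.execute(ctas); // ~231 s in this run, dominated by the segment-load wait
            try (ResultSet rs = stmt.executeQuery(
                    "SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition")) {
                while (rs.next()) {
                    System.out.println(rs.getLong(1) + "\t" + rs.getLong(2));
                }
            }
        }
    }
}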
2018-07-21T05:35:50,482 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:35:50,482 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main
2018-07-21T05:35:50,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock.
2018-07-21T05:35:50,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,484 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,484 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721053550_c753ec7e-877b-4c04-8046-af75fba899f4): SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,485 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,485 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed
2018-07-21T05:35:50,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,486 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:35:50,486 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:35:50,486 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,487 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:35:50,487 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:35:50,487 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,487 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,487 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,495 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,498 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,498 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:35:50,498 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:35:50,500 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1
2018-07-21T05:35:50,500 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:35:50,502 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:35:50,503 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,503 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,503 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,504 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,505 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,505 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,505 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,506 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_max_size_partition
2018-07-21T05:35:50,506 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=druid_max_size_partition
2018-07-21T05:35:50,506 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,507 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FunctionRegistry: Looking up GenericUDAF: sum
2018-07-21T05:35:50,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:35:50,508 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,509 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:35:50,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:35:50,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:35:50,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(_o__c0=[$0], _o__c1=[$1])
  HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
    HiveProject($f0=[$7], $f1=[$8])
      DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:35:50,510 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,511 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
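The plans above show the point of this q-file: Calcite leaves the sums in Hive operators at first, then (in the PlanModifier output further below) folds the projection and both aggregations into the DruidQuery node, so at run time Hive ships Druid one native JSON query instead of scanning rows itself. The sketch below issues what such a query plausibly looks like, reconstructed from the plan; the timeseries query type, the broker port 8082 and the exact JSON shape are assumptions, not copied from this log:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class NativeDruidSum {
    public static void main(String[] args) throws Exception {
        // Intervals and aggregator inputs ($7 = cint, $8 = cbigint) mirror the plan;
        // the surrounding JSON is a plausible native-query rendering, not a log excerpt.
        String query = "{"
                + "\"queryType\":\"timeseries\","
                + "\"dataSource\":\"default.druid_max_size_partition\","
                + "\"granularity\":\"all\","
                + "\"intervals\":[\"1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z\"],"
                + "\"aggregations\":["
                + "{\"type\":\"longSum\",\"name\":\"$f0\",\"fieldName\":\"cint\"},"
                + "{\"type\":\"longSum\",\"name\":\"$f1\",\"fieldName\":\"cbigint\"}]}";
        HttpRequest req = HttpRequest.newBuilder(URI.create("http://localhost:8082/druid/v2/"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(query))
                .build();
        HttpResponse<String> resp = HttpClient.newHttpClient()
                .send(req, HttpResponse.BodyHandlers.ofString());
        // Expected shape: [{"timestamp":"...","result":{"$f0":<sum(cint)>,"$f1":<sum(cbigint)>}}]
        System.out.println(resp.body());
    }
}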
2018-07-21T05:35:50,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,512 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveAggregate(group=[{}], agg#0=[sum($0)], agg#1=[sum($1)])
  HiveProject(cint=[$7], cbigint=[$8])
    DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]])
2018-07-21T05:35:50,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,535 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:35:50,535 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:35:50,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [Object [type=DATABASE, name=default]] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:35:50,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,537 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:35:50,537 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_materialized_views_for_rewriting: db=@hive#default
2018-07-21T05:35:50,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAuthorizationValidator: Obtained following objects in filterListCmdObjects [] for user hive_test_user. Context Info: QueryContext [commandString=null, forwardedAddresses=null]
2018-07-21T05:35:50,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:35:50,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after top-level introduceDerivedTable
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:35:50,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:35:50,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:35:50,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject($f0=[$0], $f1=[$1])
  DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:35:50,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(_c0=[$0], _c1=[$1])
  DruidQuery(table=[[default, druid_max_size_partition]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]], groups=[{}], aggs=[[sum($7), sum($8)]])
2018-07-21T05:35:50,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:35:50,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:35:50,566 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:35:50,566 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:35:50,567 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1
2018-07-21T05:35:50,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:35:50,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for druid_max_size_partition TS[0]
2018-07-21T05:35:50,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB druid_max_size_partition{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB druid_max_size_partition{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:35:50,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (. (tok_table_or_col druid_max_size_partition) $f0) _c0) (tok_selexpr (. 
(tok_table_or_col druid_max_size_partition) $f1) _c1)) 2018-07-21T05:35:50,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = druid_max_size_partition{($f0,$f0: bigint)($f1,$f1: bigint)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null 2018-07-21T05:35:50,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)} 2018-07-21T05:35:50,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0 2018-07-21T05:35:50,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001/.hive-staging_hive_2018-07-21_05-35-50_485_838479351491744857-1 for path = hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001 2018-07-21T05:35:50,569 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001/.hive-staging_hive_2018-07-21_05-35-50_485_838479351491744857-1 2018-07-21T05:35:50,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001/.hive-staging_hive_2018-07-21_05-35-50_485_838479351491744857-1/-ext-10003 2018-07-21T05:35:50,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001 row schema: null{($f0,_col0: bigint)($f1,_col1: bigint)} 2018-07-21T05:35:50,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null 2018-07-21T05:35:50,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:35:50,570 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan. 
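At this point CBO has folded the entire aggregation into a single DruidQuery node, so the Hive side of the plan is only TS-SEL-FS over the broker's result. A minimal sketch of inspecting that pushdown from client code, assuming a hypothetical HiveServer2 at localhost:10000 (the q-test itself drives an embedded CliDriver, not JDBC):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ExplainDruidPushdown {
  public static void main(String[] args) throws Exception {
    // Hypothetical HiveServer2 endpoint and credentials; adjust for a real deployment.
    try (Connection conn = DriverManager.getConnection(
             "jdbc:hive2://localhost:10000/default", "hiveptest", "");
         Statement stmt = conn.createStatement();
         // EXPLAIN output shows whether the sums were folded into one Druid timeseries query.
         ResultSet rs = stmt.executeQuery(
             "EXPLAIN SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition")) {
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
    }
  }
}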
2018-07-21T05:35:50,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-SEL[1]-FS[2]
2018-07-21T05:35:50,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:35:50,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,571 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(2)
2018-07-21T05:35:50,571 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(1)
2018-07-21T05:35:50,571 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:35:50,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-SEL[1]-FS[2]
2018-07-21T05:35:50,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-SEL[1]-LIST_SINK[3]
2018-07-21T05:35:50,572 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:35:50,572 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:35:50,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:35:50,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:35:50,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,572 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:$f0, type:bigint, comment:null), FieldSchema(name:$f1, type:bigint, comment:null)], properties:null)
2018-07-21T05:35:50,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [$f0, $f1] types: [bigint, bigint]
2018-07-21T05:35:50,573 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing operator TS[0]
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Operator 0 TS initialized
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initializing children of 0 TS
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing child 1 SEL
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing operator SEL[1]
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: SELECT struct<$f0:bigint,$f1:bigint>
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL
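The DruidSerDe above maps exactly two bigint output columns, $f0 and $f1. A minimal sketch of how such a broker response would be unpacked into those two longs with Jackson (jackson-databind is already on this test's classpath); the sample payload is hypothetical, since in this run the broker never answers:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TimeseriesResultSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical Druid timeseries response shape: one row, two aggregates.
    String json = "[{\"timestamp\":\"1969-12-31T15:59:00.000Z\","
        + "\"result\":{\"$f0\":12345,\"$f1\":67890}}]";
    ObjectMapper mapper = new ObjectMapper();
    JsonNode rows = mapper.readTree(json);
    for (JsonNode row : rows) {
      JsonNode result = row.get("result");
      long f0 = result.get("$f0").asLong(); // sum(cint)
      long f1 = result.get("$f1").asLong(); // sum(cbigint)
      System.out.println(f0 + "\t" + f1);
    }
  }
}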
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Operator 1 SEL initialized
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initializing children of 1 SEL
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing child 3 LIST_SINK
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initializing operator LIST_SINK[3]
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Operator 3 LIST_SINK initialized
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Initialization Done 3 LIST_SINK done is reset.
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Initialization Done 1 SEL done is reset.
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Initialization Done 0 TS done is reset.
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(Configuration, )=0, getMaterializedViewsForRewriting_(String, )=2, getTable_(String, String, )=19, flushCache_()=0, getAllDatabases_()=2, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721053550_c753ec7e-877b-4c04-8046-af75fba899f4); Time taken: 0.09 seconds
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721053550_c753ec7e-877b-4c04-8046-af75fba899f4): SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: QUERY
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@druid_max_size_partition
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001
2018-07-21T05:35:50,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: query: SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: type: QUERY
2018-07-21T05:35:50,574 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Input: default@druid_max_size_partition
2018-07-21T05:35:50,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: POSTHOOK: Output: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001
2018-07-21T05:35:50,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] hooks.RuntimeStatsPersistenceCheckerHook: signature checked: 0
2018-07-21T05:35:50,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase
2018-07-21T05:35:50,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {}
2018-07-21T05:35:50,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721053550_c753ec7e-877b-4c04-8046-af75fba899f4); Time taken: 0.001 seconds
2018-07-21T05:35:50,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: OK
2018-07-21T05:35:50,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:35:50,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:35:50,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition
2018-07-21T05:35:50,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: Creating fetchTask with deserializer typeinfo: struct<$f0:bigint,$f1:bigint>
2018-07-21T05:35:50,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FetchOperator: deserializer properties:
  table properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_max_size_partition, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_max_size_partition, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_max_size_partition { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532176550, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_max_size_partition","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_max_size_partition}
  partition properties: {columns.types=timestamp with local time zone:string:string:double:float:tinyint:smallint:int:bigint:boolean:boolean, location=hdfs://localhost:35925/build/ql/test/data/warehouse/druid_max_size_partition, columns=__time,cstring1,cstring2,cdouble,cfloat,ctinyint,csmallint,cint,cbigint,cboolean1,cboolean2, druid.datasource=default.druid_max_size_partition, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}, serialization.format=1, numRows=9173, numFiles=0, druid.query.type=timeseries, serialization.ddl=struct druid_max_size_partition { timestamp with local time zone __time, string cstring1, string cstring2, double cdouble, float cfloat, byte ctinyint, i16 csmallint, i32 cint, i64 cbigint, bool cboolean1, bool cboolean2}, druid.query.granularity=MINUTE, druid.fieldTypes=bigint,bigint, transient_lastDdlTime=1532176550, druid.segment.granularity=HOUR, druid.fieldNames=$f0,$f1, rawDataSize=0, columns.comments=, totalSize=0, bucket_count=-1, file.outputformat=org.apache.hadoop.mapred.SequenceFileOutputFormat, serialization.lib=org.apache.hadoop.hive.druid.serde.DruidSerDe, external.table.purge=true, storage_handler=org.apache.hadoop.hive.druid.DruidStorageHandler, bucketing_version=2, file.inputformat=org.apache.hadoop.mapred.SequenceFileInputFormat, druid.query.json={"queryType":"timeseries","dataSource":"default.druid_max_size_partition","descending":false,"granularity":"all","aggregations":[{"type":"longSum","name":"$f0","fieldName":"cint"},{"type":"longSum","name":"$f1","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}, EXTERNAL=TRUE, column.name.delimiter=,, numFilesErasureCoded=0, name=default.druid_max_size_partition}
2018-07-21T05:35:50,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidQueryRecordReader: Retrieving data from druid using query: TimeseriesQuery{dataSource='default.druid_max_size_partition', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}}
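The druid.query.json above is the exact payload the record reader is about to POST to the broker. A minimal standalone sketch that re-issues the same request with plain JDK HTTP (not the NettyHttpClient path the handler actually uses), assuming the same broker endpoint:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class DruidBrokerPost {
  public static void main(String[] args) throws Exception {
    // Timeseries query JSON taken verbatim from druid.query.json above.
    String query = "{\"queryType\":\"timeseries\","
        + "\"dataSource\":\"default.druid_max_size_partition\","
        + "\"descending\":false,\"granularity\":\"all\","
        + "\"aggregations\":[{\"type\":\"longSum\",\"name\":\"$f0\",\"fieldName\":\"cint\"},"
        + "{\"type\":\"longSum\",\"name\":\"$f1\",\"fieldName\":\"cbigint\"}],"
        + "\"intervals\":[\"1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z\"],"
        + "\"context\":{\"skipEmptyBuckets\":true}}";
    HttpURLConnection conn =
        (HttpURLConnection) new URL("http://localhost:8082/druid/v2/").openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    try (OutputStream out = conn.getOutputStream()) {
      out.write(query.getBytes("UTF-8"));
    }
    // Expect 200 and a JSON array of rows if a broker is listening; in this
    // run the connect itself fails with "Connection refused".
    System.out.println("HTTP " + conn.getResponseCode());
  }
}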
2018-07-21T05:35:50,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] client.NettyHttpClient: [POST http://localhost:8082/druid/v2/] starting
2018-07-21T05:35:50,580 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.ChannelResourceFactory: Generating: http://localhost:8082
2018-07-21T05:35:50,587 WARN [HttpClient-Netty-Boss-0] channel.SimpleChannelUpstreamHandler: EXCEPTION, please implement org.apache.hive.druid.org.jboss.netty.handler.codec.http.HttpContentDecompressor.exceptionCaught() for proper handling.
java.net.ConnectException: Connection refused: localhost/127.0.0.1:8082
    at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) ~[?:1.8.0_102]
    at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) ~[?:1.8.0_102]
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.connect(NioClientBoss.java:152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.processSelectedKeys(NioClientBoss.java:105) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.process(NioClientBoss.java:79) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.run(NioClientBoss.java:42) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.org.jboss.netty.util.internal.DeadLockProofWorker$1.run(DeadLockProofWorker.java:42) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_102]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_102]
    at java.lang.Thread.run(Thread.java:745) [?:1.8.0_102]
2018-07-21T05:35:50,593 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Failed with exception java.io.IOException:org.apache.hive.druid.io.druid.java.util.common.RE: Failure getting results for query[TimeseriesQuery{dataSource='default.druid_max_size_partition', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}}] url[http://localhost:8082/druid/v2/] because of [org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool]
java.io.IOException: org.apache.hive.druid.io.druid.java.util.common.RE: Failure getting results for query[TimeseriesQuery{dataSource='default.druid_max_size_partition', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}}] url[http://localhost:8082/druid/v2/] because of [org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool]
    at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:602)
    at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:509)
    at org.apache.hadoop.hive.ql.exec.FetchTask.fetch(FetchTask.java:146)
    at org.apache.hadoop.hive.ql.Driver.getResults(Driver.java:2722)
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.getResults(ReExecDriver.java:229)
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:259)
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335)
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339)
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313)
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171)
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
    at org.junit.rules.RunRules.evaluate(RunRules.java:20)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
    at org.junit.runners.Suite.runChild(Suite.java:127)
    at org.junit.runners.Suite.runChild(Suite.java:26)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
    at org.junit.rules.RunRules.evaluate(RunRules.java:20)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379)
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340)
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125)
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413)
Caused by: org.apache.hive.druid.io.druid.java.util.common.RE: Failure getting results for query[TimeseriesQuery{dataSource='default.druid_max_size_partition', querySegmentSpec=LegacySegmentSpec{intervals=[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]}, descending=false, virtualColumns=[], dimFilter=null, granularity='AllGranularity', aggregatorSpecs=[LongSumAggregatorFactory{fieldName='cint', expression='null', name='$f0'}, LongSumAggregatorFactory{fieldName='cbigint', expression='null', name='$f1'}], postAggregatorSpecs=[], context={skipEmptyBuckets=true}}] url[http://localhost:8082/druid/v2/] because of [org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool]
    at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader$JsonParserIterator.init(DruidQueryRecordReader.java:268)
    at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader$JsonParserIterator.hasNext(DruidQueryRecordReader.java:207)
    at org.apache.hadoop.hive.druid.serde.DruidTimeseriesQueryRecordReader.nextKeyValue(DruidTimeseriesQueryRecordReader.java:47)
    at org.apache.hadoop.hive.druid.serde.DruidTimeseriesQueryRecordReader.next(DruidTimeseriesQueryRecordReader.java:72)
    at org.apache.hadoop.hive.druid.serde.DruidTimeseriesQueryRecordReader.next(DruidTimeseriesQueryRecordReader.java:33)
    at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:569)
    ... 50 more
Caused by: java.util.concurrent.ExecutionException: org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool
    at org.apache.hive.druid.com.google.common.util.concurrent.Futures$ImmediateFailedFuture.get(Futures.java:186)
    at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader$JsonParserIterator.init(DruidQueryRecordReader.java:245)
    ... 55 more
Caused by: org.apache.hive.druid.org.jboss.netty.channel.ChannelException: Faulty channel in resource pool
    at org.apache.hive.druid.com.metamx.http.client.NettyHttpClient.go(NettyHttpClient.java:143)
    at org.apache.hive.druid.com.metamx.http.client.AbstractHttpClient.go(AbstractHttpClient.java:14)
    at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader.initialize(DruidQueryRecordReader.java:116)
    at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader.initialize(DruidQueryRecordReader.java:123)
    at org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat.getRecordReader(DruidQueryBasedInputFormat.java:295)
    at org.apache.hadoop.hive.ql.exec.FetchOperator$FetchInputFormatSplit.getRecordReader(FetchOperator.java:776)
    at org.apache.hadoop.hive.ql.exec.FetchOperator.getRecordReader(FetchOperator.java:344)
    at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:540)
    ... 50 more
Caused by: java.net.ConnectException: Connection refused: localhost/127.0.0.1:8082
    at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
    at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.connect(NioClientBoss.java:152)
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.processSelectedKeys(NioClientBoss.java:105)
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.process(NioClientBoss.java:79)
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337)
    at org.apache.hive.druid.org.jboss.netty.channel.socket.nio.NioClientBoss.run(NioClientBoss.java:42)
    at org.apache.hive.druid.org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108)
    at org.apache.hive.druid.org.jboss.netty.util.internal.DeadLockProofWorker$1.run(DeadLockProofWorker.java:42)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: close called for operator TS[0]
2018-07-21T05:35:50,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing operator TS[0]
2018-07-21T05:35:50,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_TS_0:0,
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Closing child = SEL[1]
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: close called for operator SEL[1]
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: allInitializedParentsAreClosed? parent.state = CLOSE
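The entire cause chain bottoms out in a plain "Connection refused" on localhost:8082, i.e. nothing is listening at the Druid broker's HTTP port when the fetch runs. A minimal probe sketch for that condition, assuming the same host and port the FetchOperator tried:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public class BrokerProbe {
  public static void main(String[] args) {
    // Same endpoint the record reader targeted: the Druid broker on port 8082.
    try (Socket s = new Socket()) {
      s.connect(new InetSocketAddress("localhost", 8082), 2000);
      System.out.println("broker is listening on 8082");
    } catch (IOException e) {
      // Matches the root cause in the trace above: Connection refused.
      System.out.println("broker unreachable: " + e);
    }
  }
}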
2018-07-21T05:35:50,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing operator SEL[1]
2018-07-21T05:35:50,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_SEL_1:0,
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Closing child = LIST_SINK[3]
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: close called for operator LIST_SINK[3]
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: allInitializedParentsAreClosed? parent.state = CLOSE
2018-07-21T05:35:50,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: Closing operator LIST_SINK[3]
2018-07-21T05:35:50,594 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: RECORDS_OUT_INTERMEDIATE:0, RECORDS_OUT_OPERATOR_LIST_SINK_3:0,
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ListSinkOperator: 3 Close done
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: 1 Close done
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: 0 Close done
2018-07-21T05:35:50,594 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting result dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001
2018-07-21T05:35:50,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1/-mr-10001/.hive-staging_hive_2018-07-21_05-35-50_485_838479351491744857-1
2018-07-21T05:35:50,595 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-35-50_485_838479351491744857-1
2018-07-21T05:35:50,595 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] CliDriver: Time taken: 0.091 seconds
2018-07-21T05:35:50,595 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:35:50,595 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main
2018-07-21T05:35:50,595 ERROR [main] QTestUtil: Client execution failed with error code = 1 running " SELECT sum(cint), sum(cbigint) FROM druid_max_size_partition " fname=druidmini_dynamic_partition.q See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test case logs.
2018-07-21T05:35:50,596 INFO [main] control.CoreCliDriver: Done query druidmini_dynamic_partition.q. succeeded=false, skipped=false. ElapsedTime(ms)=908727
2018-07-21T05:35:50,598 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0008
2018-07-21T05:35:50,600 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d0008 closed
2018-07-21T05:35:50,612 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@136e9a68
2018-07-21T05:35:50,614 INFO [main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook to org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl
2018-07-21T05:35:50,614 DEBUG [main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@3f35489a, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:35:50,614 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:35:50,614 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,615 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:35:50,615 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:36999
2018-07-21T05:35:50,615 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:35:50,615 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:35:50,615 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:35:50,615 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,615 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:35:50,615 DEBUG [main] session.SessionState: SessionState user: null
2018-07-21T05:35:50,615 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:36999
2018-07-21T05:35:50,616 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR; /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used
2018-07-21T05:35:50,616 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0009 with negotiated timeout 40000 for client /127.0.0.1:36999
2018-07-21T05:35:50,618 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:35:50,619 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:35:50,619 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:35:50,619 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:35:50,619 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:35:50,619 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:35:50,620 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:35:50,621 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx
2018-07-21T05:35:50,626 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:35:50,627 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:35:50,632 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:35:50,633 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db
2018-07-21T05:35:50,633 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false
2018-07-21T05:35:50,633 DEBUG [main] CliDriver: CliDriver inited with classpath
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf:
2018-07-21T05:35:50,683 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,684 INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:35:50,684 WARN [main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:35:50,685 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,685 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,685 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,688 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,689 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,689 DEBUG [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:35:50,689 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,689 INFO [main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:35:50,689 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,689 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:35:50,689 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:35:50,690 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,691 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,691 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,691 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.*
2018-07-21T05:35:50,691 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.*
2018-07-21T05:35:50,697 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,697 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,697 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,697 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:35:50,698 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:35:50,704 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,705 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,705 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,705 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,705 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,711 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,711 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,712 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,712 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:35:50,712 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table
2018-07-21T05:35:50,717 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,718 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,718 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,718 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:35:50,718 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0
2018-07-21T05:35:50,724 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,725 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,725 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,725 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive#
2018-07-21T05:35:50,725 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive#
2018-07-21T05:35:50,726 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,726 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,726 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,726 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.*
2018-07-21T05:35:50,726 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.*
2018-07-21T05:35:50,726 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,726 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,727 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,727 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,727 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,732 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,733 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,733 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,733 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,733 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,739 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:50,740 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,740 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:50,740 INFO [main] metastore.HiveMetaStore: 0: drop_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,740 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=drop_table : tbl=hive.default.druid_max_size_partition
2018-07-21T05:35:50,741 DEBUG [main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:35:50,879 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit, isolation) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:51,010 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:35:51,011 DEBUG [main] metastore.ReplChangeManager: Repl policy is not set for database
2018-07-21T05:35:51,011 DEBUG [main] utils.FileUtils: deleting hdfs://localhost:35925/build/ql/test/data/warehouse/druid_max_size_partition
2018-07-21T05:35:51,021 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:35:51,021 INFO [main] druid.DruidStorageHandler: Dropping with purge all the data for data source default.druid_max_size_partition
2018-07-21T05:35:51,021 DEBUG [main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:35:51,022 INFO [main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
2018-07-21T05:35:51,031 DEBUG [main] common.RetryUtils: Failed on try 1, retrying in 998ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?]
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	...
52 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more 2018-07-21T05:35:52,032 DEBUG [main] common.RetryUtils: Failed on try 2, retrying in 2,250ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 
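Every retry in this teardown fails with the same root cause: java.net.ConnectException: Connection refused against the Derby metadata store named in the JDBC URI above (jdbc:derby://localhost:1527/...), meaning nothing is listening on that port. A minimal probe using only JDK sockets, with the host and port copied from the log, reproduces the refusal without going through JDBI/DBCP; this is a diagnostic sketch, not part of the test harness:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

// Probe whether anything is listening on the Derby Network Server port
// from the log's JDBC URI. A refused connection here corresponds to the
// root ConnectException in the traces above.
public class DerbyPortProbe {
    public static void main(String[] args) {
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress("localhost", 1527), 2_000);
            System.out.println("Derby Network Server is accepting connections.");
        } catch (IOException e) {
            // "Connection refused" means no server process owns the port.
            System.out.println("No listener on localhost:1527: " + e.getMessage());
        }
    }
}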
2018-07-21T05:35:54,284 DEBUG [main] common.RetryUtils: Failed on try 3, retrying in 4,779ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    (stack trace identical to try 1 elided)
2018-07-21T05:35:59,066 WARN [main] common.RetryUtils: Failed on try 4, retrying in 7,255ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    (stack trace identical to try 1 elided)
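If the port is closed because the test's Derby Network Server was never started or has exited, Derby's public NetworkServerControl API can bring one up and ping it until it answers. The following is an illustrative sketch only; the qtest/Druid harness has its own startup path, and the host, port, poll count, and sleep interval here are assumptions taken from the log rather than the harness's actual configuration:

import java.io.PrintWriter;
import java.net.InetAddress;
import org.apache.derby.drda.NetworkServerControl;

// Start a Derby Network Server on the port the Druid metadata URI expects,
// then poll until it is ready. ping() throws until the server accepts connections.
public class StartDerbyServer {
    public static void main(String[] args) throws Exception {
        NetworkServerControl server =
                new NetworkServerControl(InetAddress.getByName("localhost"), 1527);
        server.start(new PrintWriter(System.out, true));
        for (int i = 0; i < 10; i++) {
            try {
                server.ping();
                System.out.println("Derby Network Server is up on localhost:1527");
                return;
            } catch (Exception notYetUp) {
                Thread.sleep(500); // not ready yet; poll again
            }
        }
        throw new IllegalStateException("Derby Network Server did not come up");
    }
}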
2018-07-21T05:36:06,324 WARN [main] common.RetryUtils: Failed on try 5, retrying in 15,907ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    (stack trace identical to try 1 elided)
2018-07-21T05:36:19,240 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:36:19,302 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:36:22,234 WARN [main] common.RetryUtils: Failed on try 6, retrying in 19,409ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?]
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: java.net.ConnectException: Connection refused
	at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
	at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
	at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
	at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
	at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
	at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
	at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
2018-07-21T05:36:41,645 WARN [main] common.RetryUtils: Failed on try 7, retrying in 44,928ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	... (stack trace identical to the try 6 attempt above)
2018-07-21T05:36:49,240 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:36:49,302 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:37:19,241 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:37:19,303 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:37:26,576 WARN [main] common.RetryUtils: Failed on try 8, retrying in 53,036ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	... (stack trace identical to the try 6 attempt above)
2018-07-21T05:37:49,241 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:37:49,303 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:38:15,369 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0001/recovery/1/summary is closed by DFSClient_NONMAPREDUCE_-1638917738_1
2018-07-21T05:38:15,519 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0005
2018-07-21T05:38:15,606 DEBUG [ContainersLauncher #0] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #0, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,733 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,741 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:17,747 DEBUG [ApplicationMasterLauncher #1] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:38:17,747 DEBUG [ApplicationMasterLauncher #1] security.LlapServerSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:38:17,749 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0001_000001 (auth:SIMPLE)
2018-07-21T05:38:18,347 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,347 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,348 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,348 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,348 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,348 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,348 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,348 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,735 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,735 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,735 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,735 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,736 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:18,737 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:38:19,241 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:38:19,303 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:38:19,615 WARN [main] common.RetryUtils: Failed on try 9, retrying in 39,055ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?]
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
52 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more 2018-07-21T05:38:49,242 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:38:49,303 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:38:58,676 INFO [main] control.CoreCliDriver: PerTestTearDown done. 
ElapsedTime(ms)=188079 2018-07-21T05:38:58,703 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0009 2018-07-21T05:38:58,705 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d0009 closed 2018-07-21T05:38:58,705 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@14e11cb7 2018-07-21T05:38:58,708 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:37428 2018-07-21T05:38:58,708 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:38:58,708 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:37428 2018-07-21T05:38:58,709 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:38:58,710 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d000a with negotiated timeout 40000 for client /127.0.0.1:37428 2018-07-21T05:38:58,711 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:38:58,712 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:38:58,712 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:38:58,712 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:38:58,713 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:38:58,713 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:38:58,713 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 
2018-07-21T05:38:58,713 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:38:58,713 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:38:58,713 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:38:58,713 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:38:58,713 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:38:58,713 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:38:58,714 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx 2018-07-21T05:38:58,720 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:38:58,722 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,727 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,729 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db 2018-07-21T05:38:58,729 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false 2018-07-21T05:38:58,729 DEBUG [main] CliDriver: CliDriver inited with classpath 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:38:58,730 INFO [main] control.CoreCliDriver: PerTestSetup done. ElapsedTime(ms)=36 2018-07-21T05:38:58,730 INFO [main] control.CoreCliDriver: Begin query: druidmini_expressions.q 2018-07-21T05:38:58,731 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,731 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:38:58,731 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true 2018-07-21T05:38:58,731 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,731 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:38:58,731 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,731 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:38:58,731 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: false 2018-07-21T05:38:58,731 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,731 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:38:58,731 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:38:58,731 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:38:58,733 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,733 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 
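
[annotation] The entries that follow compile the first statement of druidmini_expressions.q. For readability, the CTAS being compiled — copied verbatim from the "Compiling command" record below and only reflowed — is:

    CREATE EXTERNAL TABLE druid_table_n0
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES (
      "druid.segment.granularity" = "HOUR",
      "druid.query.granularity" = "MINUTE"
    )
    AS SELECT
      cast(`ctimestamp1` as timestamp with local time zone) as `__time`,
      cstring1, cstring2, cdouble, cfloat,
      ctinyint, csmallint, cint, cbigint,
      cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL
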
2018-07-21T05:38:58,733 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,733 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,733 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:38:58,733 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138): CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:38:58,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,734 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:38:58,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:38:58,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,736 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,737 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:38:58,737 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . 
Setting it to value: ignored 2018-07-21T05:38:58,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,741 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,742 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:38:58,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,743 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:38:58,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,743 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:38:58,743 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:38:58,743 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=ee745c13-27f8-4940-a347-c8307a2da8be, clientType=HIVECLI] 2018-07-21T05:38:58,744 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Mestastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook 2018-07-21T05:38:58,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. 
db = org.apache.hadoop.hive.ql.metadata.Hive@740f7490, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:38:58,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:38:58,744 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,744 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:38:58,744 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:38:58,744 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:38:58,745 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:38:58,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:38:58,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,746 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:38:58,746 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:38:58,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,747 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,749 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,750 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:38:58,750 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,751 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:38:58,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl 2018-07-21T05:38:58,751 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_table_n0 position=22 2018-07-21T05:38:58,752 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:38:58,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:38:58,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,756 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:38:58,756 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 
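
[annotation] At this point the semantic analyzer resolves the CTAS target and its database, which appears below as the get_table : tbl=hive.default.druid_table_n0 and get_database: @hive#default metastore calls. For illustration only — the planner issues these directly through the metastore client, not as SQL — a rough user-level equivalent of those RPCs would be:

    -- Illustration only: SQL-visible counterparts of the
    -- get_table / get_database RPCs logged below.
    SHOW TABLES LIKE 'druid_table_n0';
    DESCRIBE DATABASE default;
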
2018-07-21T05:38:58,756 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table_n0 2018-07-21T05:38:58,756 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table_n0 2018-07-21T05:38:58,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,757 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,757 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:38:58,757 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:38:58,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:38:58,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,758 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:38:58,758 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:38:58,759 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,759 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,759 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,772 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,773 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:38:58,773 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:38:58,773 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,773 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:38:58,773 INFO [ee745c13-27f8-4940-a347-c8307a2da8be 
main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:38:58,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:38:58,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,774 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse 2018-07-21T05:38:58,774 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1 2018-07-21T05:38:58,776 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:38:58,777 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,778 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,778 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,778 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,787 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,787 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,795 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,796 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,796 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,796 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 
0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,796 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,797 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,797 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,797 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,797 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,797 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,806 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,806 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:38:58,806 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:38:58,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,822 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:38:58,822 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery: 
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,823 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,824 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,837 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
log.PerfLogger: 2018-07-21T05:38:58,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,839 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,840 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,853 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:38:58,854 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_table_n0 position=22 2018-07-21T05:38:58,854 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
metrics.PerfLogger: 2018-07-21T05:38:58,854 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table_n0 2018-07-21T05:38:58,854 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table_n0 2018-07-21T05:38:58,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,855 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,856 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:38:58,856 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:38:58,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:38:58,856 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:38:58,857 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,857 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:38:58,863 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,864 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:38:58,864 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:38:58,864 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,864 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:38:58,864 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 
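
[annotation] Earlier in this compilation (the get_not_null_constraints, get_primary_keys, get_unique_constraints and get_foreign_keys calls above) the planner probed alltypesorc for declared constraints, which the optimizer can use as metadata hints. The probes come back empty here; purely as a hypothetical example, a constraint the probes would find had the test schema declared one might look like:

    -- Hypothetical: alltypesorc declares no constraints; this only
    -- illustrates the kind of DDL the constraint probes look for.
    ALTER TABLE alltypesorc
      ADD CONSTRAINT alltypesorc_pk PRIMARY KEY (cint) DISABLE NOVALIDATE RELY;
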
2018-07-21T05:38:58,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:38:58,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,865 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0] 2018-07-21T05:38:58,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:38:58,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:38:58,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. 
(tok_table_or_col alltypesorc) cboolean2) cboolean2)) 2018-07-21T05:38:58,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null 2018-07-21T05:38:58,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:38:58,867 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0 2018-07-21T05:38:58,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:38:58,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:38:58,868 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10003 2018-07-21T05:38:58,869 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:38:58,869 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: 
hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)}
2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:38:58,870 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:38:58,870 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:38:58,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1
2018-07-21T05:38:58,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:38:58,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:38:58,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:38:58,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:38:58,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:38:58,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:38:58,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null
2018-07-21T05:38:58,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:38:58,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
2018-07-21T05:38:58,872 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:38:58,872 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity]]
2018-07-21T05:38:58,872 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:38:58,872 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]}
2018-07-21T05:38:58,872 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean)
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint:
smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity])
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:38:58,873 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:38:58,874 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:38:58,874 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:38:58,874 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:38:58,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.287786ms + 0.010996ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:38:58,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:38:58,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
2018-07-21T05:38:58,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues:
0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ]
isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7]
2018-07-21T05:38:58,883 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3]
2018-07-21T05:38:58,883 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1
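Note: the Statistics annotations above (Num rows: 12288, Data size: 2601650, Basic stats: COMPLETE, Column stats: COMPLETE) are what StatsRulesProcFactory attaches to each operator from TS[0] through FS[3]. The same annotations can be inspected interactively with EXPLAIN; a hedged sketch, reusing the SELECT body of the CTAS executed further down (illustrative only, not output from this run):

EXPLAIN
SELECT CAST(`ctimestamp1` AS TIMESTAMP WITH LOCAL TIME ZONE) AS `__time`,
       cstring1, cstring2, cdouble, cfloat, ctinyint,
       csmallint, cint, cbigint, cboolean1, cboolean2
FROM alltypesorc
WHERE ctimestamp1 IS NOT NULL;
-- Each operator in the EXPLAIN output carries a line of the form:
--   Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE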
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:38:58,884 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity])
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:38:58,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:38:58,884 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:38:58,884 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest
ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:38:58,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:38:58,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:38:58,886 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:38:58,886 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:38:58,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:38:58,886 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:38:58,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:38:58,887 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_table_n0
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:38:58,888 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping vectorization
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:38:58,888 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:38:58,888 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
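Note: the compiled plan is now TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]. SortedDynPartitionTimeGranularityOptimizer appended a derived __time_granularity key (the internal floor_hour UDF over the new __time column, matching the HOUR segment granularity) and routes all rows through one reduce sink so they reach the Druid file sink sorted by segment hour. A hedged, user-level HiveQL sketch of what that rewrite amounts to; the FLOOR(... TO HOUR) expression and the DISTRIBUTE BY/SORT BY clauses are assumptions standing in for the internal floor_hour UDF and RS[6], not output from this run:

SELECT CAST(`ctimestamp1` AS TIMESTAMP WITH LOCAL TIME ZONE) AS `__time`,
       cstring1, cstring2, cdouble, cfloat, ctinyint,
       csmallint, cint, cbigint, cboolean1, cboolean2,
       FLOOR(`ctimestamp1` TO HOUR) AS `__time_granularity`  -- derived key, truncated to the segment granularity
FROM alltypesorc
WHERE ctimestamp1 IS NOT NULL
DISTRIBUTE BY `__time_granularity`  -- plays the role of RS[6] (parallelism set to 1 above)
SORT BY `__time_granularity`;       -- rows arrive at FS[3] grouped by segment hour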
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:38:58,888 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=9, getUniqueConstraints_(UniqueConstraintsRequest, )=9, getPrimaryKeys_(PrimaryKeysRequest, )=10, getTableColumnStatistics_(String, String, List, )=8, getForeignKeys_(ForeignKeysRequest, )=14}
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138); Time taken: 0.156 seconds
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138): CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
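Note: the statement now executing is the Druid CTAS itself. DruidStorageHandler ingests the sorted output, cutting one Druid segment per hour (druid.segment.granularity) while rolling timestamps up to the minute (druid.query.granularity). Once the CTAS finishes, druid_table_n0 can be queried like any Hive table, and simple filters and aggregations are pushed down to Druid by Calcite; a hedged example with hypothetical predicate and limit values, not taken from this run:

SELECT `__time`, cstring1, cdouble
FROM druid_table_n0
WHERE cboolean1 = true   -- hypothetical predicate, pushed to Druid as a filter
LIMIT 10;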
DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_table_n0 2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138 2018-07-21T05:38:58,889 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1 2018-07-21T05:38:58,889 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,890 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1 2018-07-21T05:38:58,890 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:MAPRED] in serial mode 2018-07-21T05:38:58,900 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found. id: hive_test_user: no such user id: hive_test_user: no such user at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?] at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?] 
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?] at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?] at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?] at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] 
at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] 
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] 2018-07-21T05:38:58,953 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-38-58_734_8651408448308552626-1 2018-07-21T05:38:58,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-38-58_734_8651408448308552626-1/hiveptest/_tez_scratch_dir for user: hiveptest 2018-07-21T05:38:58,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized 2018-07-21T05:38:58,953 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest 2018-07-21T05:38:58,953 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null 2018-07-21T05:38:58,953 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,953 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138 2018-07-21T05:38:58,954 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open 2018-07-21T05:38:58,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar" 2018-07-21T05:38:58,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,954 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: CREATE EXTERNAL TABLE druid_table_n0 ...NULL (Stage-1) 2018-07-21T05:38:58,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\nCREATE EXTERNAL TABLE druid_table_n0\nSTORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'\nTBLPROPERTIES (\"druid.segment.granularity\" = \"HOUR\", 
\"druid.query.granularity\" = \"MINUTE\")\nAS\nSELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL"} 2018-07-21T05:38:58,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest 2018-07-21T05:38:58,954 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,956 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,956 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo 2018-07-21T05:38:58,958 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,958 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 2.94KB 2018-07-21T05:38:58,964 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10003 2018-07-21T05:38:58,965 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10003 2018-07-21T05:38:58,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10001 2018-07-21T05:38:58,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10002 2018-07-21T05:38:58,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,971 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,971 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-38-58_734_8651408448308552626-1 2018-07-21T05:38:58,973 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? 
false 2018-07-21T05:38:58,973 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,973 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo 2018-07-21T05:38:58,975 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,975 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.19KB 2018-07-21T05:38:58,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc 2018-07-21T05:38:58,987 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,990 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:38:58,990 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001, dagName=CREATE EXTERNAL TABLE druid_table_n0 ...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138 } 2018-07-21T05:38:58,999 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Application not running, applicationId=application_1532175606211_0001, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0001/, diagnostics=Session timed out, lastDAGCompletionTime=1532176382574 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=7, successfulDAGs=7, failedDAGs=0, killedDAGs=0 2018-07-21T05:38:58,999 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Tez session was closed. Reopening... 
2018-07-21T05:38:58,999 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Closing Tez Session 2018-07-21T05:38:58,999 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Shutting down Tez Session, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001 2018-07-21T05:38:59,000 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Application not running, applicationId=application_1532175606211_0001, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0001/, diagnostics=Session timed out, lastDAGCompletionTime=1532176382574 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=7, successfulDAGs=7, failedDAGs=0, killedDAGs=0 2018-07-21T05:38:59,000 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Failed to shutdown Tez Session via proxy org.apache.tez.dag.api.SessionNotRunning: Application not running, applicationId=application_1532175606211_0001, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0001/, diagnostics=Session timed out, lastDAGCompletionTime=1532176382574 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=7, successfulDAGs=7, failedDAGs=0, killedDAGs=0 at org.apache.tez.client.TezClientUtils.getAMProxy(TezClientUtils.java:901) ~[tez-api-0.9.1.jar:0.9.1] at org.apache.tez.client.TezClient.getAMProxy(TezClient.java:958) ~[tez-api-0.9.1.jar:0.9.1] at org.apache.tez.client.TezClient.stop(TezClient.java:641) [tez-api-0.9.1.jar:0.9.1] at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.closeClient(TezSessionState.java:706) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.close(TezSessionState.java:673) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager.reopenInternal(TezSessionPoolManager.java:492) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager.reopen(TezSessionPoolManager.java:483) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.reopen(TezSessionState.java:931) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezTask.getNewTezSessionOnError(TezTask.java:530) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezTask.submit(TezTask.java:546) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:220) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) 
[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] 
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] 2018-07-21T05:38:59,001 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Could not connect to AM, killing session via YARN, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0001 2018-07-21T05:38:59,009 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.YarnClientImpl: Killed application application_1532175606211_0001 2018-07-21T05:38:59,009 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service: org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state STOPPED 2018-07-21T05:38:59,011 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Attempting to clean up resources for ee745c13-27f8-4940-a347-c8307a2da8be: null 2018-07-21T05:38:59,011 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: User of session id ee745c13-27f8-4940-a347-c8307a2da8be is hiveptest 2018-07-21T05:38:59,012 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Setting resources to hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources; 1 additional files, 1 localized resources 2018-07-21T05:38:59,012 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive 2018-07-21T05:38:59,014 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar 2018-07-21T05:38:59,016 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar 2018-07-21T05:38:59,016 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive
jar directory is hdfs://localhost:35925/user/hive 2018-07-21T05:38:59,017 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar] is hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar 2018-07-21T05:38:59,017 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629385 for hdfs://localhost:35925/user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar 2018-07-21T05:38:59,018 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive 2018-07-21T05:38:59,018 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar 2018-07-21T05:38:59,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar 2018-07-21T05:38:59,019 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive 2018-07-21T05:38:59,020 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar 2018-07-21T05:38:59,021 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar 2018-07-21T05:38:59,021 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive 2018-07-21T05:38:59,021 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar] is hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar 2018-07-21T05:38:59,022 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629464 for hdfs://localhost:35925/user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar 2018-07-21T05:38:59,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.mb, mr initial value=10, tez(original):tez.runtime.io.sort.mb=24, tez(final):tez.runtime.io.sort.mb=24 2018-07-21T05:38:59,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.read.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.read.timeout=null, tez(final):tez.runtime.shuffle.read.timeout=180000 2018-07-21T05:38:59,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: 
mr(unset):mapreduce.ifile.readahead.bytes, mr initial value=4194304, tez(original):tez.runtime.ifile.readahead.bytes=null, tez(final):tez.runtime.ifile.readahead.bytes=4194304 2018-07-21T05:38:59,038 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.shuffle.ssl.enabled, mr initial value=false, tez(original):tez.runtime.shuffle.ssl.enable=null, tez(final):tez.runtime.shuffle.ssl.enable=false 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.sort.spill.percent, mr initial value=0.80, tez(original):tez.runtime.sort.spill.percent=null, tez(final):tez.runtime.sort.spill.percent=0.80 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead, mr initial value=true, tez(original):tez.runtime.ifile.readahead=null, tez(final):tez.runtime.ifile.readahead=true 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.merge.percent, mr initial value=0.66, tez(original):tez.runtime.shuffle.merge.percent=null, tez(final):tez.runtime.shuffle.merge.percent=0.66 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.parallelcopies, mr initial value=5, tez(original):tez.runtime.shuffle.parallel.copies=null, tez(final):tez.runtime.shuffle.parallel.copies=5 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.reduce.slowstart.completedmaps, mr initial value=0.05, tez(original):tez.shuffle-vertex-manager.min-src-fraction=null, tez(final):tez.shuffle-vertex-manager.min-src-fraction=0.05 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.memory.limit.percent, mr initial value=0.25, tez(original):tez.runtime.shuffle.memory.limit.percent=null, tez(final):tez.runtime.shuffle.memory.limit.percent=0.25 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.factor, mr initial value=10, tez(original):tez.runtime.io.sort.factor=null, tez(final):tez.runtime.io.sort.factor=10 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress, mr initial value=false, tez(original):tez.runtime.compress=null, tez(final):tez.runtime.compress=false 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.connect.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.connect.timeout=20000, tez(final):tez.runtime.shuffle.connect.timeout=20000 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.input.buffer.percent, mr initial value=0.0, tez(original):tez.runtime.task.input.post-merge.buffer.percent=null, tez(final):tez.runtime.task.input.post-merge.buffer.percent=0.0 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress.codec, mr initial value=org.apache.hadoop.io.compress.DefaultCodec, tez(original):tez.runtime.compress.codec=null, 
tez(final):tez.runtime.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.merge.progress.records, mr initial value=10000, tez(original):tez.runtime.merge.progress.records=null, tez(final):tez.runtime.merge.progress.records=10000 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):map.sort.class, mr initial value=org.apache.hadoop.util.QuickSort, tez(original):tez.runtime.internal.sorter.class=null, tez(final):tez.runtime.internal.sorter.class=org.apache.hadoop.util.QuickSort 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.input.buffer.percent, mr initial value=0.70, tez(original):tez.runtime.shuffle.fetch.buffer.percent=0.4, tez(final):tez.runtime.shuffle.fetch.buffer.percent=0.4 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.maxtaskfailures.per.tracker, mr initial value=3, tez(original):tez.am.maxtaskfailures.per.node=null, tez(final):tez.am.maxtaskfailures.per.node=3 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.timeout, mr initial value=600000, tez(original):tez.task.timeout-ms=null, tez(final):tez.task.timeout-ms=600000 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.node-blacklisting.enable, mr initial value=false, tez(original):tez.am.node-blacklisting.enabled=false, tez(final):tez.am.node-blacklisting.enabled=false 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.counters.max, mr initial value=120, tez(original):tez.counters.max=1024, tez(final):tez.counters.max=1024 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.queuename, mr initial value=default, tez(original):tez.queue.name=default, tez(final):tez.queue.name=default 2018-07-21T05:38:59,039 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.task.listener.thread-count, mr initial value=30, tez(original):tez.am.task.listener.thread-count=null, tez(final):tez.am.task.listener.thread-count=30 2018-07-21T05:38:59,044 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Setting Tez Session access for sessionId=ee745c13-27f8-4940-a347-c8307a2da8be with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest 2018-07-21T05:38:59,045 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Tez Client Version: [ component=tez-api, version=0.9.1, revision=23b58b2b996eee255aab1a045412de00677ca2f1, SCM-URL=scm:git:https://git-wip-us.apache.org/repos/asf/tez.git, buildTime=2017-12-13T00:06:01Z ] 2018-07-21T05:38:59,045 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Opening new Tez Session (id: ee745c13-27f8-4940-a347-c8307a2da8be, scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be) 2018-07-21T05:38:59,046 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service: 
org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state INITED 2018-07-21T05:38:59,060 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.RMProxy: Connecting to ResourceManager at hive-ptest-slaves-a56.c.gcp-hive-upstream.internal/10.128.0.18:59658 2018-07-21T05:38:59,061 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl is started 2018-07-21T05:38:59,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Session mode. Starting session. 2018-07-21T05:38:59,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Using tez.lib.uris value from configuration: hdfs://localhost:35925/user/hiveptest/target/hive-tmpDir/TezAppJar.jar 2018-07-21T05:38:59,061 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Using tez.lib.uris.classpath value from configuration: null 2018-07-21T05:38:59,070 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Tez system stage directory hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002 doesn't exist and is created 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:38:59,081 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:38:59,081 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744535_3711, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/tez-conf.pb 2018-07-21T05:38:59,103 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/tez-conf.pb is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780] 2018-07-21T05:38:59,106 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:38:59,106 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744536_3712, replicas=127.0.0.1:52570, 127.0.0.1:40780, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/tez.session.local-resources.pb 2018-07-21T05:38:59,121 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/tez.session.local-resources.pb is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:38:59,133 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.YarnClientImpl: Submitted application application_1532175606211_0002 2018-07-21T05:38:59,135 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: The url to track the Tez Session: http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0002/ 2018-07-21T05:38:59,377 DEBUG [ApplicationMasterLauncher #2] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB 2018-07-21T05:38:59,377 DEBUG [ApplicationMasterLauncher #2] security.LlapServerSecurityInfo: Trying to get TokenInfo 
for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB 2018-07-21T05:38:59,378 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0002_000001 (auth:SIMPLE) 2018-07-21T05:38:59,398 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0002 2018-07-21T05:38:59,541 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs 2018-07-21T05:38:59,541 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl 2018-07-21T05:38:59,541 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:38:59,541 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem 2018-07-21T05:38:59,542 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null 2018-07-21T05:38:59,544 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB 2018-07-21T05:38:59,549 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file 2018-07-21T05:38:59,549 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl 2018-07-21T05:38:59,549 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:38:59,549 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem 2018-07-21T05:38:59,788 DEBUG [ContainersLauncher #3] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #3, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:39:00,793 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:00,793 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:06,234 INFO [Socket Reader #1 for port 60399] ipc.Server: Auth successful for appattempt_1532175606211_0002_000001 (auth:SIMPLE) 2018-07-21T05:39:06,363 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:37470 2018-07-21T05:39:06,367 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:37470 2018-07-21T05:39:06,369 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d000b with negotiated timeout 40000 for client /127.0.0.1:37470 2018-07-21T05:39:06,751 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Session re-established.
2018-07-21T05:39:06,751 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0002, dagName=CREATE EXTERNAL TABLE druid_table_n0 ...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138 } 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:39:06,947 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:39:06,947 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744537_3713, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/recovery/1/summary 2018-07-21T05:39:07,045 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/recovery/1/summary for DFSClient_NONMAPREDUCE_-324715073_1 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:39:07,064 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:39:07,064 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744538_3714, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/recovery/1/dag_1532175606211_0002_1.recovery 2018-07-21T05:39:07,085 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/recovery/1/dag_1532175606211_0002_1.recovery for DFSClient_NONMAPREDUCE_-324715073_1 2018-07-21T05:39:07,167 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0002, dagId=dag_1532175606211_0002_1, dagName=CREATE EXTERNAL TABLE druid_table_n0 ...NULL (Stage-1) 2018-07-21T05:39:07,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:07,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:07,167 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:08,176 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:08,176 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0002) 2018-07-21T05:39:08,192 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:39:09,774 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0002_000001 (auth:SIMPLE) 2018-07-21T05:39:09,789 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0002 2018-07-21T05:39:09,790 DEBUG [ContainersLauncher #4] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #4, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:39:11,220 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:39:14,241 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:14,241 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1 2018-07-21T05:39:17,263 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1 2018-07-21T05:39:17,767 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:17,767 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:39:17,767 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:39:19,074 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:39:19,074 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744539_3715, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/d4ebeb49931946c4931c65cd25692f4d/0_descriptor.json 2018-07-21T05:39:19,137 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/d4ebeb49931946c4931c65cd25692f4d/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:39:19,149 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:39:19,149 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744540_3716, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_38_58.869-07_00/0_index.zip
2018-07-21T05:39:19,159 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/d4ebeb49931946c4931c65cd25692f4d/0_index.zip is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:39:19,181 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:39:19,181 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744541_3717, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/segmentsDescriptorDir/default.druid_table_n0_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053858.869-0700.json
2018-07-21T05:39:19,191 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/segmentsDescriptorDir/default.druid_table_n0_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T053858.869-0700.json is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30
2018-07-21T05:39:19,242 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:39:19,304 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:39:19,791 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:39:19,791 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744542_3718, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/497764a1b3c74762b22f01011aef51be/0_index.zip
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:39:19,826 DEBUG [IPC Server handler 1 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:39:19,826 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744543_3719, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/497764a1b3c74762b22f01011aef51be/0_descriptor.json
2018-07-21T05:39:19,841 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/497764a1b3c74762b22f01011aef51be/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30
2018-07-21T05:39:19,846 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/intermediateSegmentDir/default.druid_table_n0/497764a1b3c74762b22f01011aef51be/0_index.zip is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:39:19,853 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:39:19,853 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744544_3720, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/segmentsDescriptorDir/default.druid_table_n0_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053858.869-0700.json
2018-07-21T05:39:19,859 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138/segmentsDescriptorDir/default.druid_table_n0_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T053858.869-0700.json is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30
2018-07-21T05:39:19,878 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:39:19,879 DEBUG [IPC Server handler 6 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:39:19,879 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744545_3721, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10003/tmpstats-0_FS_3
2018-07-21T05:39:19,884 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10003/tmpstats-0_FS_3 is closed by DFSClient_attempt_15321756062111_0002_r_000000_0_1644268220_30
2018-07-21T05:39:19,918 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/recovery/1/dag_1532175606211_0002_1.recovery is closed by DFSClient_NONMAPREDUCE_-324715073_1
2018-07-21T05:39:19,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:39:19,926 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1
2018-07-21T05:39:19,926 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@10312f0a, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
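The RenderStrategy$LogToFileFunction lines above print per-vertex progress as succeeded(+running)/total, so "Map 1: 0(+1)/1" means no map task has finished and one is in flight, while "Map 1: 1/1 Reducer 2: 1/1" marks the DAG as complete. A small sketch of that rendering follows, using hypothetical names rather than Hive's actual monitor code:

    // Hypothetical helper mirroring the monitor's "succeeded(+running)/total" format;
    // the method and class names are assumptions for illustration only.
    public class ProgressFormat {
        // "(+n)" appears only while n tasks of the vertex are still running.
        static String render(String vertex, int succeeded, int running, int total) {
            String inFlight = running > 0 ? "(+" + running + ")" : "";
            return vertex + ": " + succeeded + inFlight + "/" + total;
        }

        public static void main(String[] args) {
            System.out.println(render("Map 1", 0, 1, 1));     // Map 1: 0(+1)/1
            System.out.println(render("Reducer 2", 1, 0, 1)); // Reducer 2: 1/1
        }
    }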
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: MoveTask moving hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10002 to hdfs://localhost:35925/build/ql/test/data/warehouse/druid_table_n0
2018-07-21T05:39:19,931 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:39:19,931 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Moving data to directory hdfs://localhost:35925/build/ql/test/data/warehouse/druid_table_n0 from hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1/-ext-10002
2018-07-21T05:39:19,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:39:19,939 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode
2018-07-21T05:39:19,939 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_table_n0
2018-07-21T05:39:19,939 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.druid_table_n0 on null
2018-07-21T05:39:19,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:39:19,940 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:39:19,941 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:39:19,941 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:39:19,942 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:39:19,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:39:19,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:39:19,942 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:39:19,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:39:19,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:39:19,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:39:19,944 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:39:19,944 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:39:19,945 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:39:19,946 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
2018-07-21T05:39:19,947 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 1, retrying in 1,054ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
... 77 more
Caused by: java.net.ConnectException: Connection refused
at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
... 77 more
2018-07-21T05:39:21,003 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 2, retrying in 2,277ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
[stack trace for try 2 omitted; identical to the try 1 trace above]
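From here the Druid metadata-store bootstrap keeps failing the same way: DruidStorageHandler.preCreateTable tries to create Druid's segments table through the Derby network server the test expects at localhost:1527 (the jdbc:derby://localhost:1527/... URI supplied to the SQL connector above; the "port 1,527" in the message is just 1527 with a locale grouping separator), and every attempt dies with Connection refused, which indicates nothing is listening on that port yet. The retry delays (1,054ms, then 2,277ms, then 4,831ms below) roughly double on each attempt, consistent with exponential backoff plus random jitter. A minimal sketch of that retry shape, with invented names rather than Druid's actual RetryUtils:

    import java.util.Random;
    import java.util.concurrent.Callable;

    // Minimal exponential-backoff retry, illustrative only. The class/method names
    // and the jitter factor are assumptions; Druid's RetryUtils differs in detail.
    public class BackoffRetry {
        private static final Random RAND = new Random();

        static <T> T retry(Callable<T> task, int maxTries, long baseSleepMillis) throws Exception {
            for (int attempt = 1; ; attempt++) {
                try {
                    return task.call();
                } catch (Exception e) {
                    if (attempt >= maxTries) {
                        throw e; // out of tries: surface the last failure
                    }
                    // base * 2^(attempt-1) plus random jitter; with base ~1s this
                    // yields a progression like the 1,054ms / 2,277ms / 4,831ms here.
                    long sleep = baseSleepMillis << (attempt - 1);
                    sleep += (long) (sleep * 0.3 * RAND.nextDouble());
                    System.out.printf("Failed on try %d, retrying in %,dms.%n", attempt, sleep);
                    Thread.sleep(sleep);
                }
            }
        }
    }

    // Hypothetical usage: retry(() -> openDerbyHandle(), 9, 1000)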
2018-07-21T05:39:23,284 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 3, retrying in 4,831ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
...
77 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more 2018-07-21T05:39:26,915 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0002_000001 (auth:SIMPLE) 2018-07-21T05:39:26,943 WARN [ContainersLauncher #4] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0002_01_000002 is : 143 2018-07-21T05:39:26,945 DEBUG [ContainersLauncher #4] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #4, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:39:28,118 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 4, retrying in 9,185ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 
2018-07-21T05:39:29,060 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,060 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,063 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,064 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:29,064 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:39:37,306 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 5, retrying in 17,808ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 2018-07-21T05:39:49,242 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:39:49,304 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:39:54,847 INFO [pool-5-thread-1] NameNodeMetricsLog: >> Begin NameNode metrics dump 2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:HttpPort=null 2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:XceiverCount=3 2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null 2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20... 2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID 2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":252...
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:39:54,848 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=129314
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.17572463768115942
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=2
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0002_000001":1}
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.532608695652174
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:SentBytes=101146
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1106
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=2
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1106
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=24.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=2
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=2
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=6846.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,850 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=17908
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=17908
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:FilesTotal=774
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BlocksTotal=698
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsed=304172833
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=698
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityRemaining=157932127054
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=163741184223
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=230 69
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalSyncCount=14829
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=17908
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityRemainingGB=147.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@4ddcb5c8
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7...
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1554
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39448658202
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=57254463
2018-07-21T05:39:54,851 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,852 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=11796640
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.09846589961647491
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=85
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.15974653993663498
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:SentBytes=4359458
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=26766
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=26766
2018-07-21T05:39:54,853 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=43
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=257.42825
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=905
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=180.72052
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=407
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.1153846153846154
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=2
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=63
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=2
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=1
2018-07-21T05:39:54,861 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=4.0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":269...
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:39:54,862 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=1
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=2
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=9
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=10
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@2ca152e2
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5...
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1492
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39448617242
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50069425
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":276...
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:39:54,863 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,864 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=43
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=257.45917
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=501
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=182.13939
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=407
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:39:54,868 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=286913
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=9
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=5.8
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:SentBytes=2670
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=9
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=9
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=9
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=5
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=4
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=3.5
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=7.333333333333333
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3080
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SyncsNumOps=14828
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.01962630359212051
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.0211665664461816
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.06611570247933884
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesCreated=3601
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=242
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetListingOps=34
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TotalFileOps=11921
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:AddBlockOps=2721
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DeleteFileOps=1382
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsNumOps=17908
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8192
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:39:54,869 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateFileOps=2721
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesRenamed=2026
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=702
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FileInfoOps=2320
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1027
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesDeleted=2828
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetBlockLocations=717
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=172448
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=6
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=3.0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:SentBytes=1773
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=6
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=6
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=6
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:39:54,870 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3...
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:DfsUsed=97960080
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:Remaining=39448559898
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1506
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3464
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1517
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=1.2
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=400
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2047
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=591.3913270637408
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=192
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2037
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=317142.3411358404
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195815491
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5474452554744526
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=192
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3342
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3342
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100072053
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=192
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=7852.697740112994
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=400
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2037
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.915
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=206
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5606
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2037
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=4088.244744355048
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=5969.282485875706
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=0.630057803468208
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:39:54,871 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:XceiverCount=1
2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20... 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":262... 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0 2018-07-21T05:39:54,872 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CacheCapacity=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CacheUsed=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentRemaining=46.848083 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentUsed=0.09022809 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:TotalBlocks=698 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=163741184223 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] 
NameNodeMetricsLog: NameNodeInfo:Safemode= 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={} 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.09022809 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"us... 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"17908"} 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Free=157932127054 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=304172833 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Threads=601 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_A... 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Used=304172833 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hivepte... 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/t... 
2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5... 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:DfsUsed=50069425 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:Remaining=39448510746 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:39:54,873 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1492 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@7e8e19c3 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1... 
2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1517 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39582720000 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100075146 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=698 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:54,874 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcActivityForPort59862:SentBytes=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:39:54,875 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:BlocksTotal=698 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:39:54-0700","windows":[{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptes... 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityUsed=304172833 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityRemaining=157932127054 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalSyncTimes=230 69 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FilesTotal=774 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: 
FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalSyncCount=14829 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:MaxObjects=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:39:54,877 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=2 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.5154826958105647 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=4.0 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1102 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=3.5 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,878 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=2 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=27 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2139 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.5 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.027676240208877285 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.08270676691729323 
2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2320 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8192 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2721 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.11801242236024845 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1348 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.18396226415094338 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.1956521739130435 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2793 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=34 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=717 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=10 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.5 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.1866412213740458 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=0.25147116516280893 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.49784229109454686 
2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=5 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2721 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1398 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.3333333333333333 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.1111111111111111 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.13887454827052143 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=1600 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=37 2018-07-21T05:39:54,879 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.1175 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=43 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=257.5373 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCount=27 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=184.2872 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=407 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: 
JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogFatal=0 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971 2018-07-21T05:39:54,885 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=6138 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=3 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8571428571428572 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:SentBytes=3354 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=16 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=16 2018-07-21T05:39:54,886 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemMaxM=1820.5 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsWaiting=43 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: 
JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=257.55054 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCount=27 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapUsedM=185.70639 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=407 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillis=1971 2018-07-21T05:39:54,892 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3524 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1506 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=1.21 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=400 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2038 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=43.23915343915344 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=182 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2028 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=313080.08094098885 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95566866 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.5398416886543536 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] 
NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=182 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1805 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1805 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=100986561 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=182 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=7627.66966966967 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:39:54,893 INFO 
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2028
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.955
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=203
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5579
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2028
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=4679.894266175697
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5681.477477477478
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=0.4303030303030303
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:39:54,893 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6145
2018-07-21T05:39:54,894 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,896 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:39:54,896 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:39:54,896 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8571428571428572
2018-07-21T05:39:54,896 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,896 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=16
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:39:54,897 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=43
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=257.5514
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=122
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCount=27
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=185.70639
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=407
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@496383ef
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3...
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1506
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39448396058
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=97960080
2018-07-21T05:39:54,901 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:39:54,902 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=8
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:39:54,903 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-...
2018-07-21T05:39:54,904 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=122235
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.3103448275862069
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=8
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.1724137931034482
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:SentBytes=44742
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=68
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=68
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1...
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:DfsUsed=100075146
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:Remaining=39582556160
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1517
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:39:54,905 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=43
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=257.55856
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=817
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCount=27
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=187.12527
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=407
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=1
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=2
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=9
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=10
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:39:54,910 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3591
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1554
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=1.125
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=400
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2071
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=583.8390923156267
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=188
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2066
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=299191.4768211921
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=229
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52050419
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.55
[pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.55 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=188 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1158 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1158 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=58687606 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=188 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=8018.4622356495465 2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0 
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=400
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2066
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.885
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=148
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5028
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2066
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5558.195696721312
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=5662.326283987915
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=0.5460122699386503
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersKilled=4
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=1
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AvailableVCores=7
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=5
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=52.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94479661
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=1
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersRunning=1
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=5
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedGB=1
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:39:54,911 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7...
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:DfsUsed=57254463
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:Remaining=39448248602
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1554
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:GetGroupsNumOps=10
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.25
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3519
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1492
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=1.23
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=400
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2036
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=554.7783613445379
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:39:54,912 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=182
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2029
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=294465.6415743217
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2631157
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.5710540115364446
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=182
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=399
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=399
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49144577
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=182
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=19370.69536423841
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=400
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2029
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.915
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4805
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2029
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=4589.980900052328
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5521.2913907284765
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=0.3973509933774834
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: << End NameNode metrics dump
2018-07-21T05:39:55,117 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 6, retrying in 22,405ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1197"}]
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":252,"usedSpace":3287990,"freeSpace":19748540416,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":261,"usedSpace":53966473,"freeSpace":19614322970,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:39:55,326 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=129314
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.17572463768115942
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=2
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0002_000001":1}
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.532608695652174
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=101146
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1106
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=2
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1106
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=24.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=2
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=2
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=6846.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=17908
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=17908
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=774
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=698
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=304172833
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=698
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157932127054
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=163741184223
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=230 69 
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14829
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=17908
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=147.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@47a7374c
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1554
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39362822426
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=57254463
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=11796640
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.09846589961647491
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=85
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:39:55,327 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.15974653993663498
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=4359458
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=26766
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=26766
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=43
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=257.42825
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=905
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=180.72052
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=407
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.1153846153846154
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=2
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=63
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=2
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=4.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}]
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":269,"usedSpace":4669440,"freeSpace":19614290202,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":269,"usedSpace":45399985,"freeSpace":19748507648,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=2
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=9
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=10
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@2898535f
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1492
2018-07-21T05:39:55,328 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39362781466
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50069425
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}]
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":276,"usedSpace":53740171,"freeSpace":19614282010,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":247,"usedSpace":44219909,"freeSpace":19748499456,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=43
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=257.45917
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=501
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=182.13939
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=407
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=286913
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=9
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=5.8
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=2670
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=9
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=9
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=9
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=5
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=4
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=3.5
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=7.333333333333333
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3080
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14828
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.01962630359212051
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.0211665664461816
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.06611570247933884
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3601
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=242
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=34
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=11921
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2721
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1382
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=17908
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8192
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2721
2018-07-21T05:39:55,329 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2026
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=702
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2320
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1027
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=2828
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=717
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=172448
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=6
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=3.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1773
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=6
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=6
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=6
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=97960080
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39448559898
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1506
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3464
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1517
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=1.2
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=400
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2047
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=591.3913270637408
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=192
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2037
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=317142.3411358404
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195815491
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5474452554744526
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=192
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3342
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3342
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100072053
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=192
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=7852.697740112994
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=400
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2037
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.915
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=206
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5606
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2037
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=4088.244744355048
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=5969.282485875706
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=0.630057803468208
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:39:55,330 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}]
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":262,"usedSpace":45154304,"freeSpace":19748462592,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":258,"usedSpace":54920842,"freeSpace":19748462592,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,331 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.848083
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.09022809
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=698
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=163741184223
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.09022809
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"usedSpace":100075146,"adminState":"In Service","nonDfsUsedSpace":40911059318,"capacity":84278861824,"numBlocks":520,"version":"3.1.0","used":100075146,"remaining":39583899648,"blockScheduled":0,"blockPoolUsed":100075146,"blockPoolUsedPercent":0.118742876,"volfails":0,"lastBlockReport":19},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":1,"usedSpace":48883144,"adminState":"In Service","nonDfsUsedSpace":40962275896,"capacity":84278861824,"numBlocks":537,"version":"3.1.0","used":48883144,"remaining":39449657626,"blockScheduled":1,"blockPoolUsed":48883144,"blockPoolUsedPercent":0.058001667,"volfails":0,"lastBlockReport":19},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":0,"usedSpace":57254463,"adminState":"In Service","nonDfsUsedSpace":40954674625,"capacity":84278861824,"numBlocks":512,"version":"3.1.0","used":57254463,"remaining":39448887578,"blockScheduled":1,"blockPoolUsed":57254463,"blockPoolUsedPercent":0.06793454,"volfails":0,"lastBlockReport":19},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":1,"usedSpace":97960080,"adminState":"In Service","nonDfsUsedSpace":40913174384,"capacity":84278861824,"numBlocks":522,"version":"3.1.0","used":97960080,"remaining":39449682202,"blockScheduled":1,"blockPoolUsed":97960080,"blockPoolUsedPercent":0.116233274,"volfails":0,"lastBlockReport":19}}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"17908"}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157932127054
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=304172833
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=601
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=304172833
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}]
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50069425
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39448510746
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1492
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@3f9ed452
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1517
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39496884224
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100075146
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=698
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:39:55,332 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=698
2018-07-21T05:39:55,333 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:39:55-0700","windows":[{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":114}],"totalCount":114},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":67}],"totalCount":67},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":7}],"totalCount":7}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"*","topUsers":[{"user":"hiveptest","count":150}],"totalCount":150},{"opType":"delete","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":79}],"totalCount":79},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":25}],"totalCount":25},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":13}],"totalCount":13},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1348}],"totalCount":1348},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34},{"opType":"*","topUsers":[{"user":"hiveptest","count":11374}],"totalCount":11374},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1398}],"totalCount":1398},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2320}],"totalCount":2320},{"opType":"rename","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2139}],"totalCount":2139},{"opType":"create","topUsers":[{"user":"hiveptest","count":2721}],"totalCount":2721},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"open","topUsers":[{"user":"hiveptest","count":717}],"totalCount":717}],"windowLenMs":1500000}]}
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=304172833
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157932127054
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=230 69
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=774
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14829
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=2
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.5154826958105647
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=4.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1102
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=3.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=2
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=27
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2139
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.027676240208877285
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.08270676691729323
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2320
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8192
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2721
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.11801242236024845
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1348
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.18396226415094338
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.1956521739130435
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2793
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=34
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=717
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=10
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.1866412213740458
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=0.25147116516280893
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.49784229109454686
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2721
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1398
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.3333333333333333
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.1111111111111111
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.13887454827052143
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=1600
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=37
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.1175
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=43
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=257.5373
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=184.2872
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=407
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=6138
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=3
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8571428571428572
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3354
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=16
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=16
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=43
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=257.55054
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=185.70639
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=407
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3524
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1506
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=1.21
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=400
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2038
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=43.23915343915344
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=182
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2028
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=313080.08094098885
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95566866
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.5398416886543536
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=182
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1805
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1805
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:55,334 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=100986561
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=182
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=7627.66966966967
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=400
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2028
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.955
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=203
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5579
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2028
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=4679.894266175697
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5681.477477477478
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=0.4303030303030303
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6145
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8571428571428572
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=16
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=43
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=257.5514
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=122
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=185.70639
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=407
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@796411af
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1506
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39362560282
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=97960080
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12
2018-07-21T05:39:55,335 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=8
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176695527,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176694504,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384}]
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=122235
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.3103448275862069
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=8
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.1724137931034482
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=44742
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=68
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=68
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100075146
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39582556160
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1517
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=43
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=257.55856
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=817
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=187.12527
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=407
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=1
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=2
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=9
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=10
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:55,336 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3591
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1554
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=1.125
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=400
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2071
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=583.8390923156267
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=188
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2066
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=299191.4768211921
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=229
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52050419
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.55
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=188
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1158
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1158
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=58687606
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=188
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=8018.4622356495465
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=400
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2066
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.885
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=148
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5028
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2066
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5558.195696721312
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=5662.326283987915
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=0.5460122699386503
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=4
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=7
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=5
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=52.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94479661
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=5
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=57254463
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39448248602
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1554
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=10
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.25
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3519
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1492
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=1.23
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=400
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2036
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=554.7783613445379
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=182
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2029
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:39:55,337 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=294465.6415743217
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2631157
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.5710540115364446
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=182
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=399
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=399
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49144577
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=182
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=19370.69536423841
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=400
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2029
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.915
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4805
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2029
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=4589.980900052328
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5521.2913907284765
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=0.3973509933774834
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:39:55,667 INFO [pool-19-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}]
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":252,"usedSpace":3287990,"freeSpace":19790639104,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":261,"usedSpace":53966473,"freeSpace":19656421658,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=129314
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.17572463768115942
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=2
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0002_000001":1}
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.532608695652174
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=101146
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1106
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=2
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1106
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=24.0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=2
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=2
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=6846.0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:39:55,668 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=17908
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=17908
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=774
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=698
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=304172833
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=698
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157932127054
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=163741184223
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=230 69
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14829
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=17908
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=147.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@2f70cfaf
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1554
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39447019802
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=57254463
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=11796640
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.09846589961647491
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=85
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.15974653993663498
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=4359458
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=26766
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=26766
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=43
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=257.42825
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=905
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=180.72052
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=407
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.1153846153846154
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=2
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=63
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=2
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,669 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=1
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=4.0
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}]
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":269,"usedSpace":4669440,"freeSpace":19656388890,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":269,"usedSpace":45399985,"freeSpace":19790606336,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=1 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=2 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=9 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=2 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=10 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=8 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: 
QueueMetrics,q0=root:ReservedMB=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@4aa77365 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1492 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39446978842 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50069425 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}] 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":276,"usedSpace":53740171,"freeSpace":19656380698,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":247,"usedSpace":44219909,"freeSpace":19790598144,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null 2018-07-21T05:39:55,670 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem 2018-07-21T05:39:55,671 INFO 
[pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=43 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=269.58594 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=257.45917 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=501 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=182.13939 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=407 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: 
JvmMetrics-2:tag.ProcessName=DataNode 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=286913 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=9 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=5.8 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=2670 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=9 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=9 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=9 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,671 INFO 
[pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=5 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=4 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=3.5 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=7.333333333333333 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3080 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14828 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.01962630359212051 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.0211665664461816 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.06611570247933884 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3601 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=242 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=34 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=11921 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0 
2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2721 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1382 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=17908 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8192 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2721 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2026 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=702 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2320 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1027 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=2828 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=717 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NameNodeActivity:StorageBlockReportAvgTime=0.25 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=172448 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=6 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=3.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1773 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=6 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=6 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=6 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0 
2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=97960080 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39448559898 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1506 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3464 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1517 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=1.2 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=400 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2047 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=591.3913270637408 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=192 2018-07-21T05:39:55,671 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2037 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=317142.3411358404 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195815491 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5474452554744526 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=192 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3342 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0 2018-07-21T05:39:55,672 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3342 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100072053 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=192 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=7852.697740112994 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=400 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2037 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.915 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=206 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5606 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2037 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=4088.244744355048 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=5969.282485875706 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=0.630057803468208 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}] 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":262,"usedSpace":45154304,"freeSpace":19790561280,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":258,"usedSpace":54920842,"freeSpace":19790561280,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0 2018-07-21T05:39:55,672 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.848083 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.09022809 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=698 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=163741184223 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode= 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true 2018-07-21T05:39:55,673 INFO 
[pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.09022809 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"usedSpace":100075146,"adminState":"In Service","nonDfsUsedSpace":40911059318,"capacity":84278861824,"numBlocks":520,"version":"3.1.0","used":100075146,"remaining":39583899648,"blockScheduled":0,"blockPoolUsed":100075146,"blockPoolUsedPercent":0.118742876,"volfails":0,"lastBlockReport":19},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":1,"usedSpace":48883144,"adminState":"In Service","nonDfsUsedSpace":40962275896,"capacity":84278861824,"numBlocks":537,"version":"3.1.0","used":48883144,"remaining":39449657626,"blockScheduled":1,"blockPoolUsed":48883144,"blockPoolUsedPercent":0.058001667,"volfails":0,"lastBlockReport":19},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":1,"usedSpace":57254463,"adminState":"In Service","nonDfsUsedSpace":40954674625,"capacity":84278861824,"numBlocks":512,"version":"3.1.0","used":57254463,"remaining":39448887578,"blockScheduled":1,"blockPoolUsed":57254463,"blockPoolUsedPercent":0.06793454,"volfails":0,"lastBlockReport":19},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":1,"usedSpace":97960080,"adminState":"In Service","nonDfsUsedSpace":40913174384,"capacity":84278861824,"numBlocks":522,"version":"3.1.0","used":97960080,"remaining":39449682202,"blockScheduled":1,"blockPoolUsed":97960080,"blockPoolUsedPercent":0.116233274,"volfails":0,"lastBlockReport":19}} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"17908"} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157932127054 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=304172833 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=601 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NameNodeInfo:Used=304172833 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}] 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50069425 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39448510746 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1492 
2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,673 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@57ab0b73 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: 
FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1517 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39581089792 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100075146 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=698 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:39:55,674 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=698 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:39:55-0700","windows":[{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":114}],"totalCount":114},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":67}],"totalCount":67},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":7}],"totalCount":7}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename 
(options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"*","topUsers":[{"user":"hiveptest","count":150}],"totalCount":150},{"opType":"delete","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":79}],"totalCount":79},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":25}],"totalCount":25},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":13}],"totalCount":13},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1348}],"totalCount":1348},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34},{"opType":"*","topUsers":[{"user":"hiveptest","count":11374}],"totalCount":11374},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1398}],"totalCount":1398},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2320}],"totalCount":2320},{"opType":"rename","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2139}],"totalCount":2139},{"opType":"create","topUsers":[{"user":"hiveptest","count":2721}],"totalCount":2721},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"open","topUsers":[{"user":"hiveptest","count":717}],"totalCount":717}],"windowLenMs":1500000}]} 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=304172833 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157932127054 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 
2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=230 69 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=774 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14829 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=2 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.5154826958105647 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=4.0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1102 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=3.5 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=2 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=27 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2139 
2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.5 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.027676240208877285 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.08270676691729323 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2320 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8192 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2721 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.11801242236024845 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1348 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.18396226415094338 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.1956521739130435 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2793 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=34 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=717 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=10 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.5 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort35925:CompleteAvgTime=0.1866412213740458 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=0.25147116516280893 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.49784229109454686 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=5 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2721 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1398 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.3333333333333333 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.1111111111111111 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.13887454827052143 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=1600 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=37 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.1175 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=43 2018-07-21T05:39:55,675 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=257.5373 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] 
DataNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=184.2872 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=407 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=6138 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=3 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8571428571428572 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3354 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=16 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO 
[pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=16 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=43 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=257.55054 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=185.70639 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=407 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3524 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1506 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=1.21 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=400 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2038 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=43.23915343915344 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=182 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2028 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:39:55,676 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=313080.08094098885 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95566866 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.5398416886543536 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=182 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1805 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1805 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=100986561 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=182 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=7627.66966966967 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=400 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2028 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.955 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=203 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5579 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2028 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=4679.894266175697 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5681.477477477478 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=0.4303030303030303 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6145 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8571428571428572 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=16 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:39:55,676 
INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=43 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=257.5514 2018-07-21T05:39:55,676 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=122 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:39:55,677 
INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=185.70639 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=407 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@1bb9164b 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1506 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39446757658 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=97960080 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] 
DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=8 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0 2018-07-21T05:39:55,677 INFO 
[pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176695527,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176694504,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384}] 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=122235 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.3103448275862069 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=8 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.1724137931034482 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=44742 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=68 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=68 2018-07-21T05:39:55,677 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100075146 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39582556160 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1517 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=43 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=257.55856 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=817 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=187.12527 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=407 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort35555:tag.port=35555 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=2 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=9 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=2 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=10 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=8 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3591 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1554 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=1.125 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0 
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=400 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2071 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=583.8390923156267 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=188 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2066 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=299191.4768211921 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=229 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52050419 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.55 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=188 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1158 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1158 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=58687606 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=188 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=8018.4622356495465 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=400 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2066 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.885 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=148 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5028 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2066 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5558.195696721312 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=5662.326283987915 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=0.5460122699386503 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=4 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=1 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=7 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=5
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=52.0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94479661
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=1
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=1
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=5
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=1
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:39:55,678 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=57254463
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39448248602
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1554
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=10
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.25
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3519
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1492
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=1.23
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=400
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2036
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=554.7783613445379
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=182
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2029
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=294465.6415743217
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2631157
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.5710540115364446
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=182
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=399
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=399
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49144577
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=182
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=19370.69536423841
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=400
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2029
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.915
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4805
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2029
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=4589.980900052328
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5521.2913907284765
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=0.3973509933774834
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:39:56,370 INFO [pool-33-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1198"}]
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":252,"usedSpace":3287990,"freeSpace":19790413824,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":261,"usedSpace":53966473,"freeSpace":19656196378,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=129314
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.17572463768115942
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=2
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0002_000001":1}
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.532608695652174
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=101146
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1106
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=2
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1106
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=24.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=2
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=2
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=6846.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=17908
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=17908
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=774
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=698
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=304172833
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=698
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157932127054
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=163741184223
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=230 69
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14829
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=17908
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=147.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,371 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@187c298d
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1554
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39446569242
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=57254463
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=11796640
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.09846589961647491
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=85
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.15974653993663498
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=4359458
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=26766
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=26766
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=43
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=257.42825
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=905
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=180.72052
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=407
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.1153846153846154
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=2
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=63
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=2
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=1
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=4.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1199"}]
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":269,"usedSpace":4669440,"freeSpace":19656167706,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":269,"usedSpace":45399985,"freeSpace":19790385152,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=1
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=2
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=9
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:56,372 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=10
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@f6e87e8
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1492
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39446536474
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50069425
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1199"}]
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":276,"usedSpace":53740171,"freeSpace":19656155418,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":247,"usedSpace":44219909,"freeSpace":19790372864,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=43
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=257.45917
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=501
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=182.13939
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=407
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=286913
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=9
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=5.8
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=2670
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=9
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=9
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=9
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=5
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=4
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=3.5
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=7.333333333333333
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3080
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14828
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.01962630359212051
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.0211665664461816
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.06611570247933884
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3601
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=242
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=34
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=11921
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2721
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1382
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=17908
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8192
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2721
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2026
2018-07-21T05:39:56,373 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=702
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2320
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1027
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=2828
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=717
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=172448
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=6
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=3.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1773
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=6
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=6
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=6
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=97960080
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39448559898
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1506
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3464
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1517
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=1.2
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=400
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2047
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=591.3913270637408
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=192
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2037
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=317142.3411358404
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195815491
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5474452554744526
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=192
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3342
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3342
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100072053
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=192
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=7852.697740112994
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=400
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2037
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.915
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=206
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5606
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2037
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=4088.244744355048
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=5969.282485875706
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=0.630057803468208
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1199"}]
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":262,"usedSpace":45154304,"freeSpace":19790336000,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":258,"usedSpace":54920842,"freeSpace":19790336000,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:39:56,374 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.848083
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.09022809
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=698
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=163741184223
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.09022809
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":2,"usedSpace":100075146,"adminState":"In Service","nonDfsUsedSpace":40911059318,"capacity":84278861824,"numBlocks":520,"version":"3.1.0","used":100075146,"remaining":39583899648,"blockScheduled":0,"blockPoolUsed":100075146,"blockPoolUsedPercent":0.118742876,"volfails":0,"lastBlockReport":19},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":2,"usedSpace":48883144,"adminState":"In Service","nonDfsUsedSpace":40962275896,"capacity":84278861824,"numBlocks":537,"version":"3.1.0","used":48883144,"remaining":39449657626,"blockScheduled":1,"blockPoolUsed":48883144,"blockPoolUsedPercent":0.058001667,"volfails":0,"lastBlockReport":19},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":1,"usedSpace":57254463,"adminState":"In Service","nonDfsUsedSpace":40954674625,"capacity":84278861824,"numBlocks":512,"version":"3.1.0","used":57254463,"remaining":39448887578,"blockScheduled":1,"blockPoolUsed":57254463,"blockPoolUsedPercent":0.06793454,"volfails":0,"lastBlockReport":19},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":2,"usedSpace":97960080,"adminState":"In Service","nonDfsUsedSpace":40913174384,"capacity":84278861824,"numBlocks":522,"version":"3.1.0","used":97960080,"remaining":39449682202,"blockScheduled":1,"blockPoolUsed":97960080,"blockPoolUsedPercent":0.116233274,"volfails":0,"lastBlockReport":19}}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"17908"}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157932127054
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=304172833
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=601
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=304172833
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646}
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}]
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:39:56,375 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50069425
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39448510746
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1492
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@491f8bb5
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1517
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39580639232
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100075146
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=698
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,376 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=698
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:39:56-0700","windows":[{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":114}],"totalCount":114},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":67}],"totalCount":67},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":7}],"totalCount":7}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"*","topUsers":[{"user":"hiveptest","count":150}],"totalCount":150},{"opType":"delete","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":79}],"totalCount":79},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":25}],"totalCount":25},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":13}],"totalCount":13},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1348}],"totalCount":1348},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34},{"opType":"*","topUsers":[{"user":"hiveptest","count":11374}],"totalCount":11374},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1398}],"totalCount":1398},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2320}],"totalCount":2320},{"opType":"rename","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2139}],"totalCount":2139},{"opType":"create","topUsers":[{"user":"hiveptest","count":2721}],"totalCount":2721},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"open","topUsers":[{"user":"hiveptest","count":717}],"totalCount":717}],"windowLenMs":1500000}]}
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=304172833
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157932127054
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=230 69
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=774
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14829
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:39:56,378 INFO [pool-33-thread-1]
DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=2 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.5154826958105647 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=4.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1102 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=3.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=2 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=27 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2139 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.027676240208877285 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.08270676691729323 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2320 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8192 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2721 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.11801242236024845 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1348 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.18396226415094338 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: 
RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.1956521739130435 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2793 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=34 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=717 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=10 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.1866412213740458 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=0.25147116516280893 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.49784229109454686 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2721 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1398 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.3333333333333333 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.1111111111111111 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.13887454827052143 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=1600 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=37 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.1175 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: 
JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=43 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=257.5373 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=184.2872 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=407 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=6138 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=3 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] 
DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8571428571428572 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3354 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=16 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=16 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=43 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=257.55054 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] 
DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=185.70639 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=407 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3524 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1506 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=1.21 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=400 2018-07-21T05:39:56,378 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2038 2018-07-21T05:39:56,379 INFO 
[pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=43.23915343915344 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=182 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2028 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=313080.08094098885 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95566866 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.5398416886543536 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=182 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1805 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1805 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=100986561 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=182 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=7627.66966966967 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=400 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2028 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.955 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=203 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 
2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5579 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2028 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=4679.894266175697 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5681.477477477478 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=0.4303030303030303 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO 
[pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 
2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6145 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8571428571428572 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=16 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] 
DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=43 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=269.83594 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=257.5514 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=122 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=185.70639 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=407 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] 
DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@60d73e90 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1506 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39446307098 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=97960080 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: 
RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:39:56,379 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891 2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5 2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2 2018-07-21T05:39:56,380 INFO [pool-33-thread-1] 
DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=8
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176695527,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176694504,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384}]
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=122235
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.3103448275862069
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=8
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.1724137931034482
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=44742
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=68
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=68
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100075146
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39582556160
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1517
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=43
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=257.55856
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=817
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=187.12527
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=407
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:39:56,380 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=2
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=9
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=10
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3591
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1554
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=1.125
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=400
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2071
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=583.8390923156267
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=188
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2066
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=299191.4768211921
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=229
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52050419
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.55
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=188
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1158
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1158
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=58687606
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=188
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=8018.4622356495465
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=400
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2066
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.885
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=148
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5028
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2066
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5558.195696721312
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=5662.326283987915
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=0.5460122699386503
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=4
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=7
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=5
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=52.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94479661
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=5
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=57254463
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39448248602
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1554
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=10
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.25
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3519
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1492
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=1.23
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:39:56,381 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=400
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2036
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=554.7783613445379
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=182
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2029
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=294465.6415743217
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2631157
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.5710540115364446
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=182
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=399
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=399
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49144577
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=182
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=19370.69536423841
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=400
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2029
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.915
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4805
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2029
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=4589.980900052328
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5521.2913907284765
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=0.3973509933774834
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:56,382 INFO [pool-33-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:39:56,929 INFO [pool-46-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1199"}]
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":252,"usedSpace":3287990,"freeSpace":19746385920,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":261,"usedSpace":53966473,"freeSpace":19612168474,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=129314
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.17572463768115942
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=2
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0002_000001":1}
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.532608695652174
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=101146
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1106
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=2
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1106
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=24.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=2
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=2
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=6846.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:39:56,930 INFO
[pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=17908 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=17908 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=774 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSNamesystem:TotalECBlockGroups=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=698 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=304172833 2018-07-21T05:39:56,930 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=698 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157932127054 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=163741184223 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=230 69 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14829 2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=17908 
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=147.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@16473966
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1554
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39358505242
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=57254463
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=3
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=90.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=11796640
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.09846589961647491
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=85
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.15974653993663498
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=4359458
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=26766
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=26766
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=43
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=257.42825
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=905
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=180.72052
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=407
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.1153846153846154
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=2
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=63
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=2
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=1
2018-07-21T05:39:56,931 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=4.0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1200"}]
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":269,"usedSpace":4669440,"freeSpace":19612135706,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":269,"usedSpace":45399985,"freeSpace":19746353152,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=1
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=2
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=9
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=10
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@4d37c29c
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1492
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39358472474
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50069425
2018-07-21T05:39:56,932 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1200"}]
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":276,"usedSpace":53740171,"freeSpace":19612123418,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":247,"usedSpace":44219909,"freeSpace":19746340864,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=43
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=269.58594
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=257.45917
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=501
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=182.13939
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=407
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=286913
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=9
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=5.8
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=2670
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=9
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=9
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=9
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=5
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=4
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=3.5
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=7.333333333333333
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3080
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14828
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.01962630359212051
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.0211665664461816
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.06611570247933884
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3601
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=242
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=34
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=11921
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2721
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1382
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=17908
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8192
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2721
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2026
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:39:56,933 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=702
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2320
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1027
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=2828
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=717
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
NameNodeActivity:StorageBlockReportAvgTime=0.25 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=172448 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=6 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=3.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=1773 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=6 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=6 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=6 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0 
2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=97960080 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39448559898 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1506 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3464 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1517 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=1.2 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=400 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2047 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=591.3913270637408 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=192 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2037 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=317142.3411358404 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195815491 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5474452554744526 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=192 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3342 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0 2018-07-21T05:39:56,934 INFO 
[pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3342 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100072053 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=192 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=7852.697740112994 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=400 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2037 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.915 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=206 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5606 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2037 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=4088.244744355048 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=5969.282485875706 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=0.630057803468208 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1200"}] 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":262,"usedSpace":45154304,"freeSpace":19746304000,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":258,"usedSpace":54920842,"freeSpace":19746304000,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0 2018-07-21T05:39:56,934 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.768345 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.09057998 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=698 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=164008810230 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode= 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true 2018-07-21T05:39:56,935 INFO 
[pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.09057998 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":0,"usedSpace":100075146,"adminState":"In Service","nonDfsUsedSpace":41000474998,"capacity":84278861824,"numBlocks":520,"version":"3.1.0","used":100075146,"remaining":39494483968,"blockScheduled":0,"blockPoolUsed":100075146,"blockPoolUsedPercent":0.118742876,"volfails":0,"lastBlockReport":20},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":0,"usedSpace":50069425,"adminState":"In Service","nonDfsUsedSpace":41051070543,"capacity":84278861824,"numBlocks":537,"version":"3.1.0","used":50069425,"remaining":39359676698,"blockScheduled":1,"blockPoolUsed":50069425,"blockPoolUsedPercent":0.059409227,"volfails":0,"lastBlockReport":20},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":2,"usedSpace":57254463,"adminState":"In Service","nonDfsUsedSpace":40954674625,"capacity":84278861824,"numBlocks":512,"version":"3.1.0","used":57254463,"remaining":39448887578,"blockScheduled":1,"blockPoolUsed":57254463,"blockPoolUsedPercent":0.06793454,"volfails":0,"lastBlockReport":19},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":0,"usedSpace":97960080,"adminState":"In Service","nonDfsUsedSpace":41002590064,"capacity":84278861824,"numBlocks":522,"version":"3.1.0","used":97960080,"remaining":39360266522,"blockScheduled":1,"blockPoolUsed":97960080,"blockPoolUsedPercent":0.116233274,"volfails":0,"lastBlockReport":20}} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"17908"} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157663314766 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=305359114 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=601 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: 
NameNodeInfo:Used=305359114 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646} 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}] 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:39:56,935 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50069425 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39448510746 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1492 
2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@6e1b7d8c 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1517 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39492567040 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100075146 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=698 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: 
RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:39:56,936 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=698 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:39:56-0700","windows":[{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":114}],"totalCount":114},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":67}],"totalCount":67},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":7}],"totalCount":7}],"windowLenMs":60000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"rename 
(options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":3}],"totalCount":3},{"opType":"*","topUsers":[{"user":"hiveptest","count":150}],"totalCount":150},{"opType":"delete","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":79}],"totalCount":79},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":25}],"totalCount":25},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":8}],"totalCount":8}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":13}],"totalCount":13},{"opType":"datanodeReport","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1348}],"totalCount":1348},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":34}],"totalCount":34},{"opType":"*","topUsers":[{"user":"hiveptest","count":11374}],"totalCount":11374},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1398}],"totalCount":1398},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2320}],"totalCount":2320},{"opType":"rename","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2139}],"totalCount":2139},{"opType":"create","topUsers":[{"user":"hiveptest","count":2721}],"totalCount":2721},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"open","topUsers":[{"user":"hiveptest","count":717}],"totalCount":717}],"windowLenMs":1500000}]} 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=305359114 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157663314766 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=230 69 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=774 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14829 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=2 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.5154826958105647 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=4.0 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1102 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=3.5 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=2 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=27 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2139 
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.5
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.027676240208877285
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.08270676691729323
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2320
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8192
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2721
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.11801242236024845
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1348
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.18396226415094338
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=0.1956521739130435
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2793
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=34
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=717
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=10
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=1.5
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.1866412213740458
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=0.25147116516280893
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.49784229109454686
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=5
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2721
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1398
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.3333333333333333
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.1111111111111111
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.13887454827052143
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=1600
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=37
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.1175
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=43
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=257.5373
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=184.2872
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=407
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=6138
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,937 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=3
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.8571428571428572
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3354
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=16
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=16
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=43
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=257.55054
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=185.70639
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=407
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3524
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1506
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=1.21
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=400
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2038
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=43.23915343915344
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=182
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2028
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=313080.08094098885
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95566866
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.5398416886543536
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=182
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1805
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1805
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=100986561
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=182
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=7627.66966966967
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=400
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2028
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.955
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=203
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5579
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2028
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=4679.894266175697
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5681.477477477478
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=0.4303030303030303
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=4.25
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=12
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6145
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.8571428571428572
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=16
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=43
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=257.5514
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=122
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=185.70639
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=407
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:39:56,938 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@1cae2794
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1506
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39358234906
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=97960080
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=4891
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.5
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=2
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=4.25
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=2741
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=12
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=12
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=8
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=3
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=72.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=3
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532176695527,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532176694504,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384}]
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=122235
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.3103448275862069
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=8
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.1724137931034482
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=44742
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=68
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=68
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:39:56,939 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100075146
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39582556160
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1517
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=43
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=269.83594
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=257.55856
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=817
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=187.12527
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=407
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=1
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=2
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=9
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=2
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=10
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=8
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: 
QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3591 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1554 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=1.125 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0 
2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0 2018-07-21T05:39:56,940 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=400 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2071 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=583.8390923156267 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=188 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2066 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=299191.4768211921 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=229 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52050419 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.55 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=188 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1158 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1158 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=58687606 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=188 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=8018.4622356495465 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=400 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2066 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.885 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=148 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5028 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2066 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5558.195696721312 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=5662.326283987915 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=0.5460122699386503 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=4 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=7 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: 
NodeManagerMetrics:ContainerUsedMemGB=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=5 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=52.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94479661 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=5 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0 2018-07-21T05:39:56,942 
INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=57254463 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39448248602 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1554 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=10 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.25 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0 2018-07-21T05:39:56,942 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] 
DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0 
2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3519 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1492 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=1.23 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=400 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2036 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=554.7783613445379 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=182 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2029 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=294465.6415743217 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2631157 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.5710540115364446 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=182 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=399 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=399 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49144577 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=182 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=19370.69536423841 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=400 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2029 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.915 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4805 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2029 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=4589.980900052328 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5521.2913907284765 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=0.3973509933774834 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0 2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:39:56,943 INFO [pool-46-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:40:14,679 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:40:14,680 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.ExecutorHelper: afterExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:40:15,550 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:40:15,550 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.ExecutorHelper: afterExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:40:17,526 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 7, retrying in 78,277ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:40:19,242 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:40:19,304 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:40:49,243 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:40:49,304 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:41:19,243 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:41:19,305 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
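Every trace in this run bottoms out in the same root cause: a TCP connect to localhost:1527 is refused, meaning nothing is listening where the Druid metadata connector expects Derby's network server (the "1,527" in the Derby message appears to be locale-aware number formatting of port 1527). A minimal, self-contained probe for that symptom; only the host and port are taken from the log, the class name and timeout are illustrative:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public class DerbyPortProbe {
    public static void main(String[] args) {
        // localhost:1527 is the Derby Network Server endpoint seen in the log.
        try (Socket socket = new Socket()) {
            // Fail fast instead of waiting on the OS default connect timeout.
            socket.connect(new InetSocketAddress("localhost", 1527), 2000);
            System.out.println("Derby network server is accepting connections on 1527");
        } catch (IOException e) {
            // "Connection refused" here matches the java.net.ConnectException in the
            // traces: no listener on the port, so JDBC can never obtain a connection.
            System.out.println("No listener on localhost:1527: " + e.getMessage());
        }
    }
}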
2018-07-21T05:41:35,806 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 8, retrying in 65,689ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:41:49,243 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:41:49,305 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:42:19,244 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:42:19,305 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
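The HikariCP housekeeper lines show the two metastore pools staying healthy throughout (10 idle, 0 waiting), so only the Druid metadata connection is failing, and common.RetryUtils keeps backing off between attempts (65,689 ms after try 8 above, 35,150 ms after try 9 below). Purely as an illustration of that behavior, and not Druid's actual implementation, a capped, jittered backoff in the spirit of the RetryUtils frames could look like the following; all names and constants here are assumptions:

import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;

public final class Retry {
    // Illustrative sketch only: capped exponential backoff with jitter.
    public static <T> T retry(Callable<T> task, int maxTries) throws Exception {
        for (int attempt = 1; ; attempt++) {
            try {
                return task.call();
            } catch (Exception e) {
                if (attempt >= maxTries) {
                    throw e; // retries exhausted: the caller sees the last failure
                }
                // Exponential base delay capped at 60 s, then jittered, so the
                // actual sleep can land above or below the cap (cf. 65,689 ms
                // on one try and 35,150 ms on the next in the log).
                long base = Math.min(1000L << Math.min(attempt, 6), 60_000L);
                long sleep = base / 2 + ThreadLocalRandom.current().nextLong(base);
                // %,d reproduces the grouping separator seen in the log messages.
                System.err.printf("Failed on try %d, retrying in %,dms.%n", attempt, sleep);
                Thread.sleep(sleep);
            }
        }
    }
}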
2018-07-21T05:42:41,499 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 9, retrying in 35,150ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:42:49,244 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:42:49,306 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:43:16,660 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.SQLMetadataConnector: Exception creating table
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
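At this point the retries are exhausted: SQLMetadataConnector has logged "Exception creating table" above, and below DDLTask turns the same connection failure into a HiveException for the CREATE TABLE that DruidStorageHandler.preCreateTable was servicing. The fix direction is to have Derby's network server actually listening on localhost:1527 before the storage handler touches the metadata store. A hedged sketch of starting one programmatically, assuming derbynet.jar is on the classpath; the address and port come from the log, everything else is illustrative:

import java.io.PrintWriter;
import java.net.InetAddress;
import org.apache.derby.drda.NetworkServerControl;

public class StartMetadataDerby {
    public static void main(String[] args) throws Exception {
        // Bind the Derby Network Server to the endpoint the Druid metadata
        // connector keeps trying to reach (localhost:1527 in the traces).
        NetworkServerControl server =
                new NetworkServerControl(InetAddress.getByName("localhost"), 1527);
        server.start(new PrintWriter(System.out, true)); // start() returns before the server is ready
        // ping() throws until the server is actually accepting connections,
        // so poll briefly instead of racing the JDBC clients against startup.
        for (int i = 0; i < 50; i++) {
            try {
                server.ping();
                System.out.println("Derby metadata server is up on localhost:1527");
                return;
            } catch (Exception notYetUp) {
                Thread.sleep(100);
            }
        }
        throw new IllegalStateException("Derby network server did not come up on 1527");
    }
}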
at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] 
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 58 more Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 58 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 58 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
58 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
58 more 2018-07-21T05:43:16,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,685 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReOptimizePlugin: ReOptimization: retryPossible: false 2018-07-21T05:43:16,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,685 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.OperatorStatsReaderHook: Reading runtime statistics for tez vertex task: Map 1 2018-07-21T05:43:16,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.OperatorStatsReaderHook: Reading runtime statistics for tez vertex task: Reducer 2 2018-07-21T05:43:16,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,687 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 2018-07-21T05:43:16,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:43:16,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,687 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:43:16,687 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:43:16,687 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721053858_15613a0f-ad83-4b82-aba6-4aaf60ca0138); Time taken: 257.798 seconds 2018-07-21T05:43:16,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:43:16,688 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:43:16,688 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:43:16,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: org.apache.hadoop.hive.ql.reexec.ReExecutionOverlayPlugin@4974318d.shouldReExecute = false 2018-07-21T05:43:16,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: 
org.apache.hadoop.hive.ql.reexec.ReOptimizePlugin@645d860.shouldReExecute = false 2018-07-21T05:43:16,691 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-38-58_734_8651408448308552626-1 2018-07-21T05:43:16,711 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-38-58_734_8651408448308552626-1 2018-07-21T05:43:16,714 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:43:16,714 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:43:16,714 ERROR [main] QTestUtil: Client execution failed with error code = 1 running " CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL" fname=druidmini_expressions.q See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. 2018-07-21T05:43:16,714 INFO [main] control.CoreCliDriver: Done query druidmini_expressions.q. succeeded=false, skipped=false. 
ElapsedTime(ms)=257984 2018-07-21T05:43:16,716 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d000a 2018-07-21T05:43:16,717 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d000a closed 2018-07-21T05:43:16,724 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@50bb6247 2018-07-21T05:43:16,727 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:43:16,727 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:37860 2018-07-21T05:43:16,727 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:37860 2018-07-21T05:43:16,729 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:43:16,729 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d000c with negotiated timeout 40000 for client /127.0.0.1:37860 2018-07-21T05:43:16,730 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:43:16,732 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:43:16,732 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:43:16,732 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:43:16,732 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,732 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:43:16,733 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 
2018-07-21T05:43:16,733 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:43:16,733 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:43:16,733 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,733 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:43:16,733 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:43:16,734 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:43:16,737 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx 2018-07-21T05:43:16,744 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:43:16,745 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:43:16,751 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:43:16,753 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db 2018-07-21T05:43:16,754 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false 2018-07-21T05:43:16,754 DEBUG [main] CliDriver: CliDriver inited with classpath 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:43:16,755 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,756 INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:43:16,756 WARN [main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:43:16,757 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,757 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,757 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,759 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,760 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,760 DEBUG [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:43:16,760 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,760 INFO [main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:43:16,760 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,760 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive# 2018-07-21T05:43:16,760 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive# 2018-07-21T05:43:16,765 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,765 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,765 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,765 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.* 2018-07-21T05:43:16,765 INFO [main] 
HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.* 2018-07-21T05:43:16,770 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,770 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,770 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,770 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:43:16,770 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:43:16,778 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,779 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,779 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,780 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,780 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,786 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,787 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,787 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,787 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:43:16,788 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:43:16,794 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,794 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,795 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,795 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive# 2018-07-21T05:43:16,795 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive# 2018-07-21T05:43:16,796 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,797 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,797 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,797 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.* 2018-07-21T05:43:16,797 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.* 2018-07-21T05:43:16,797 DEBUG [main] pool.PoolBase: 
HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,797 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,798 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,798 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,798 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,804 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,805 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,805 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,805 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,805 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,811 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,812 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,812 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,812 INFO [main] metastore.HiveMetaStore: 0: drop_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,812 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=drop_table : tbl=hive.default.druid_partitioned_table 2018-07-21T05:43:16,813 DEBUG [main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:43:16,855 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:43:16,856 DEBUG [main] metastore.ReplChangeManager: Repl policy is not set for database 2018-07-21T05:43:16,856 DEBUG [main] utils.FileUtils: deleting hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table 2018-07-21T05:43:16,858 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:43:16,858 INFO [main] druid.DruidStorageHandler: Dropping with purge all the data for data source default.druid_partitioned_table 2018-07-21T05:43:16,858 DEBUG [main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User 2018-07-21T05:43:16,859 INFO [main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage]. 
2018-07-21T05:43:16,860 DEBUG [main] common.RetryUtils: Failed on try 1, retrying in 606ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?] 
at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] 
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
52 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more 2018-07-21T05:43:17,469 DEBUG [main] common.RetryUtils: Failed on try 2, retrying in 1,949ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 
	at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?]
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
Caused by: java.net.ConnectException: Connection refused
	at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
	at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
	at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
	at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
	at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
	at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
	at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 52 more
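The cause chain above bottoms out in java.net.ConnectException: Connection refused against localhost:1527 (the "1,527" in the messages is just locale-formatted number rendering; 1527 is Apache Derby's default network-server port). In other words, the Derby instance backing the Druid metadata store in this test cluster is not listening, so DruidStorageHandler.commitDropTable fails before any SQL runs. A minimal, hypothetical probe reproduces the innermost failure without going through JDBC at all; the class name, timeout, and messages below are illustrative and not part of the test harness or any Hive/Druid API:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

// Hypothetical probe: checks whether anything accepts TCP connections on the
// Derby network-server port that the Druid metadata store uses in this run.
public class DerbyPortProbe {
    public static void main(String[] args) {
        try (Socket socket = new Socket()) {
            // 1527 is Derby's default port; the log renders it as "1,527".
            socket.connect(new InetSocketAddress("localhost", 1527), 2_000);
            System.out.println("Derby network server is reachable on localhost:1527");
        } catch (IOException e) {
            // A ConnectException("Connection refused") here matches the
            // root cause of every retry in the trace above.
            System.out.println("localhost:1527 unreachable: " + e);
        }
    }
}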
2018-07-21T05:43:19,244 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:43:19,306 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:43:19,421 DEBUG [main] common.RetryUtils: Failed on try 3, retrying in 3,501ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	(stack trace and cause chain identical to the one above; omitted)
2018-07-21T05:43:22,925 WARN [main] common.RetryUtils: Failed on try 4, retrying in 8,024ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	(stack trace and cause chain identical to the one above; omitted)
2018-07-21T05:43:30,952 WARN [main] common.RetryUtils: Failed on try 5, retrying in 11,500ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	(stack trace and cause chain identical to the one above; omitted)
2018-07-21T05:43:42,454 WARN [main] common.RetryUtils: Failed on try 6, retrying in 21,737ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	(stack trace and cause chain identical to the one above; omitted)
2018-07-21T05:43:49,245 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:43:49,306 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:44:04,194 WARN [main] common.RetryUtils: Failed on try 7, retrying in 58,971ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 52 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 52 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 52 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 52 more
2018-07-21T05:44:19,245 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:44:19,306 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:44:41,493 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0002/recovery/1/summary is closed by DFSClient_NONMAPREDUCE_-324715073_1
2018-07-21T05:44:41,626 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d000b
2018-07-21T05:44:41,711 DEBUG [ContainersLauncher #3] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #3, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,835 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,836 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,836 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,836 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:43,839 DEBUG [ApplicationMasterLauncher #3] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:44:43,839 DEBUG [ApplicationMasterLauncher #3] security.LlapServerSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:44:43,841 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0002_000001 (auth:SIMPLE)
2018-07-21T05:44:44,837 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,837 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,838 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:44,838 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:44:49,245 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:44:49,307 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:45:03,168 WARN [main] common.RetryUtils: Failed on try 8, retrying in 55,270ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:45:19,245 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:45:19,307 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:45:49,246 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:45:49,307 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:45:58,442 WARN [main] common.RetryUtils: Failed on try 9, retrying in 86,049ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:46:19,246 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:46:19,307 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:46:49,246 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:46:49,308 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:47:19,247 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:47:19,308 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:47:24,494 INFO [main] control.CoreCliDriver: PerTestTearDown done. ElapsedTime(ms)=247779
2018-07-21T05:47:24,506 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d000c
2018-07-21T05:47:24,507 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d000c closed
2018-07-21T05:47:24,507 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@2de22551
2018-07-21T05:47:24,510 DEBUG [main] session.SessionState: SessionState user: null
2018-07-21T05:47:24,510 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:38282
2018-07-21T05:47:24,510 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:38282
2018-07-21T05:47:24,511 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used
2018-07-21T05:47:24,512 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d000d with negotiated timeout 40000 for client /127.0.0.1:38282
2018-07-21T05:47:24,512 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:47:24,514 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:47:24,514 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:47:24,514 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:47:24,514 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:47:24,514 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:47:24,514 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
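The HikariPool housekeeper records above come from the two metastore connection pools idling between tests: 10 connections each, none active, none waiting. A minimal sketch, assuming the HikariCP 2.6.x API (HikariCP-2.6.1.jar appears in the classpath dump below), of reading those same four counters programmatically; the pool name and JDBC URL are placeholders:

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.HikariPoolMXBean;

public class PoolStatsExample {
    public static void main(String[] args) {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:derby://localhost:1527/metastore_db"); // placeholder URL
        config.setMaximumPoolSize(10); // matches total=10 in the log
        config.setPoolName("HikariPool-1");
        try (HikariDataSource ds = new HikariDataSource(config)) {
            // The housekeeper thread logs these numbers at DEBUG every
            // 30 seconds; the MXBean exposes them for polling.
            HikariPoolMXBean pool = ds.getHikariPoolMXBean();
            System.out.printf("total=%d, active=%d, idle=%d, waiting=%d%n",
                pool.getTotalConnections(),
                pool.getActiveConnections(),
                pool.getIdleConnections(),
                pool.getThreadsAwaitingConnection());
        }
    }
}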
2018-07-21T05:47:24,514 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:47:24,514 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:47:24,515 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:47:24,515 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:47:24,515 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:47:24,515 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:47:24,516 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx
2018-07-21T05:47:24,523 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:47:24,524 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:47:24,530 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:47:24,532 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db
2018-07-21T05:47:24,532 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false
2018-07-21T05:47:24,532 DEBUG [main] CliDriver: CliDriver inited with classpath
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:47:24,533 INFO [main] control.CoreCliDriver: PerTestSetup done. ElapsedTime(ms)=36 2018-07-21T05:47:24,533 INFO [main] control.CoreCliDriver: Begin query: druidmini_test1.q 2018-07-21T05:47:24,533 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:47:24,533 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:47:24,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true 2018-07-21T05:47:24,534 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:47:24,534 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:47:24,534 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:47:24,534 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:47:24,534 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true 2018-07-21T05:47:24,534 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:47:24,534 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:47:24,534 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:47:24,534 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:47:24,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 
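For readability, the statement from druidmini_test1.q that the driver compiles in the entries below is reproduced here reformatted; the content is identical to the one-line form in the log, nothing is added.

    -- CTAS from druidmini_test1.q as compiled below (reformatted only)
    CREATE EXTERNAL TABLE druid_table_n3
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES (
      "druid.segment.granularity" = "HOUR",
      "druid.query.granularity" = "MINUTE")
    AS
    SELECT cast(`ctimestamp1` as timestamp with local time zone) as `__time`,
           cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint,
           cint, cbigint, cboolean1, cboolean2
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;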
2018-07-21T05:47:24,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:47:24,535 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab): CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:47:24,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,536 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:47:24,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:47:24,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:47:24,539 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . 
Setting it to value: ignored 2018-07-21T05:47:24,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,540 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:47:24,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,543 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:47:24,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,543 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:47:24,543 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis 2018-07-21T05:47:24,544 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=ee745c13-27f8-4940-a347-c8307a2da8be, clientType=HIVECLI] 2018-07-21T05:47:24,544 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook 2018-07-21T05:47:24,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. 
db = org.apache.hadoop.hive.ql.metadata.Hive@4fbbb01e, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:47:24,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:47:24,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,549 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:47:24,549 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:47:24,549 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:47:24,549 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:47:24,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:47:24,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,551 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:47:24,551 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:47:24,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,551 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,553 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,554 DEBUG 
[ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:47:24,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,554 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:47:24,554 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl 2018-07-21T05:47:24,554 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_table_n3 position=22 2018-07-21T05:47:24,556 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:47:24,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,556 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:47:24,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:47:24,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 
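The DruidSerDe initialization further down explains why the CTAS casts ctimestamp1: a Druid-backed Hive table keys on a leading column named `__time` of type timestamp with local time zone, which becomes Druid's time dimension. A minimal sketch of that pattern, using a hypothetical source table `events(ts timestamp, payload string)` that is not part of this test:

    -- Hypothetical illustration; `events`, `ts`, and `payload` are assumed
    -- names, not from this log. The Druid storage handler expects the
    -- leading `__time` column, as in the logged CTAS above.
    CREATE EXTERNAL TABLE druid_events
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
    AS
    SELECT cast(ts as timestamp with local time zone) as `__time`, payload
    FROM events
    WHERE ts IS NOT NULL;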
2018-07-21T05:47:24,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table_n3 2018-07-21T05:47:24,559 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table_n3 2018-07-21T05:47:24,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,560 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:47:24,560 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:47:24,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:47:24,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis 2018-07-21T05:47:24,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:47:24,561 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,569 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:47:24,569 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:47:24,569 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,569 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:47:24,569 INFO [ee745c13-27f8-4940-a347-c8307a2da8be 
main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:47:24,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:47:24,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse 2018-07-21T05:47:24,570 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1 2018-07-21T05:47:24,572 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis 2018-07-21T05:47:24,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,574 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,575 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,575 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,576 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 
0: get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,576 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,577 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,577 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,578 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,578 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:47:24,579 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc 2018-07-21T05:47:24,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,579 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:47:24,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,581 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery: 
HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation: HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,582 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,583 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,598 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$9], cboolean2=[$10]) HiveFilter(condition=[IS NOT NULL($8)]) HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], cstring2=[$7], ctimestamp1=[$8], cboolean1=[$10], cboolean2=[$11]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
log.PerfLogger: 2018-07-21T05:47:24,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,599 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,600 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,601 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier HiveProject(__time=[CAST($8):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cstring2=[$7], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10], cboolean2=[$11]) HiveFilter(condition=[IS NOT NULL($8)]) HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc]) 2018-07-21T05:47:24,615 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_table_n3 position=22 2018-07-21T05:47:24,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] 
metrics.PerfLogger: 2018-07-21T05:47:24,615 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_table_n3 2018-07-21T05:47:24,615 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_table_n3 2018-07-21T05:47:24,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,617 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:47:24,617 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:47:24,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:47:24,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables 2018-07-21T05:47:24,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,618 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:47:24,623 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,624 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries 2018-07-21T05:47:24,624 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables 2018-07-21T05:47:24,624 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,624 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:47:24,624 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 
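The repeated get_table and get_database calls here, like the earlier constraint lookups (get_not_null_constraints, get_primary_keys, get_unique_constraints, get_foreign_keys), are the planner pulling metastore metadata for default.alltypesorc. The same metadata can be inspected interactively; a sketch in standard HiveQL:

    -- Surfacing the table metadata the planner fetches above.
    DESCRIBE FORMATTED default.alltypesorc;
    SHOW CREATE TABLE default.alltypesorc;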
2018-07-21T05:47:24,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:47:24,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,625 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0] 2018-07-21T05:47:24,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:47:24,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} 2018-07-21T05:47:24,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp1)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring2) cstring2) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1) (tok_selexpr (. 
(tok_table_or_col alltypesorc) cboolean2) cboolean2)) 2018-07-21T05:47:24,626 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null 2018-07-21T05:47:24,627 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:47:24,627 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0 2018-07-21T05:47:24,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:47:24,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean] 2018-07-21T05:47:24,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10003 2018-07-21T05:47:24,628 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,628 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default 2018-07-21T05:47:24,628 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: 
hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cstring2,_col2: string)(cdouble,_col3: double)(cfloat,_col4: float)(ctinyint,_col5: tinyint)(csmallint,_col6: smallint)(cint,_col7: int)(cbigint,_col8: bigint)(cboolean1,_col9: boolean)(cboolean2,_col10: boolean)} 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null 2018-07-21T05:47:24,629 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan. 2018-07-21T05:47:24,629 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3] 2018-07-21T05:47:24,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,630 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=1 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator : 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3) 2018-07-21T05:47:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2) 2018-07-21T05:47:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1) 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp1 is not null 2018-07-21T05:47:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0) 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp1 is not null 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3] 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity 
optimization kicked in... 2018-07-21T05:47:24,631 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity]] 2018-07-21T05:47:24,632 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]} 2018-07-21T05:47:24,632 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col10=Column[_col10], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8], VALUE._col9=Column[_col9]} 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer. 
Bailing out of Bucketing Sorting Reduce Sink Optimizer 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3] 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0] 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp1 is not null (type: boolean) 2018-07-21T05:47:24,632 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0] 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id) 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: 
smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2) 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {} 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4] 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2) 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {} 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2] 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2]) 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null}) 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {} 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5] 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0])))) 2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time 
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity])
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:47:24,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:47:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:47:24,633 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:47:24,650 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 0.307683ms + 0.011455ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:47:24,652 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
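Reformatted for readability, the metastore's direct-SQL statement from the record above is (the bound "in (...)" parameter list is elided in the log and stays elided here):

    select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE",
           "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE",
           "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR",
           "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED"
    from "TAB_COL_STATS"
    where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)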
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp1= colName: ctimestamp1 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cstring2= colName: cstring2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cboolean2= colName: cboolean2 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col9= colName: _col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col10= colName: _col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, _col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6]
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,653 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 3115 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 3115 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 2601650 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: string countDistincts: 6122 numNulls: 6230 avgColLen: 12.136555989583334 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col10= colName: VALUE._col10 colType: boolean countDistincts: 2 numNulls: 6230 avgColLen: 4.0 numTrues: 3983 numFalses: 5190 isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col9= colName: VALUE._col9 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3]
2018-07-21T05:47:24,654 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1
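The column statistics dumped above come from the metastore table queried earlier. A sketch of how such statistics are produced and inspected from the CLI, assuming the same alltypesorc table (standard HiveQL; these commands are not part of this test run):

    -- Compute column-level statistics (countDistincts, numNulls, ranges, ...).
    ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS;
    -- Inspect the stored statistics for one column, e.g. cint.
    DESCRIBE FORMATTED alltypesorc cint;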
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:47:24,654 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2)
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp1 is not null
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2)
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp1]) Column[cstring1] Column[cstring2] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1] Column[cboolean2])
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:47:24,654 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] Column[_col9] Column[_col10] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring2,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean2,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: string|{null}cstring2,_col3: double|{null}cdouble,_col4: float|{null}cfloat,_col5: tinyint|{null}ctinyint,_col6: smallint|{null}csmallint,_col7: int|{null}cint,_col8: bigint|{null}cbigint,_col9: boolean|{null}cboolean1,_col10: boolean|{null}cboolean2,__time_granularity: timestamp|{null})
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[VALUE._col9] Column[VALUE._col10] Column[KEY.__time_granularity])
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cstring2: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},cboolean2: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:47:24,655 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:47:24,655 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:47:24,655 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:47:24,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:47:24,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:24,656 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:47:24,656 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:47:24,656 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:47:24,657 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:47:24,658 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_table_n3
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:47:24,658 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of DependencyCollectionTask
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of StatsTask
2018-07-21T05:47:24,658 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of MoveTask
2018-07-21T05:47:24,658 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Examining input format to see if vectorization is enabled.
2018-07-21T05:47:24,659 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Vectorization is enabled for input format(s) [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
2018-07-21T05:47:24,659 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Validating and vectorizing MapWork... (vectorizedVertexNum 0)
2018-07-21T05:47:24,666 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] vector.VectorizationContext: Input Expression = GenericUDFOPNotNull(Column[ctimestamp1]), Vectorized Expression = SelectColumnIsNotNull(col 8:timestamp)
2018-07-21T05:47:24,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: vectorizeOperator org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator
2018-07-21T05:47:24,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: vectorizeOperator org.apache.hadoop.hive.ql.plan.FilterDesc
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorization enabled: true
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorized: false
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map notVectorizedReason: Select expression for SELECT operator: Vectorizing data type timestamp with local time zone not supported
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorizedVertexNum: 0
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map enabledConditionsMet: [hive.vectorized.use.vectorized.input.format IS true]
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map inputFileFormatClassNameSet: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
2018-07-21T05:47:24,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Using reduce tag 0
2018-07-21T05:47:24,699 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lazybinary.LazyBinarySerDe: LazyBinarySerDe initialized with: columnNames=[_col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10] columnTypes=[timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:47:24,699 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Validating and vectorizing ReduceWork... (vectorizedVertexNum 1)
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorization enabled: true
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorized: false
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce notVectorizedReason: Select expression for SELECT operator: Vectorizing data type timestamp with local time zone not supported
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorizedVertexNum: 1
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reducer hive.vectorized.execution.reduce.enabled: true
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reducer engine: tez
2018-07-21T05:47:24,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
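Both the map and reduce vertices above fall back to row mode for the same reason: the plan projects a timestamp with local time zone column, which the Vectorizer does not support. A minimal way to reproduce the rejection, assuming the same alltypesorc table (hypothetical query, not part of this run):

    -- Expected to report the SELECT expression as not vectorizable,
    -- mirroring the notVectorizedReason records above.
    EXPLAIN VECTORIZATION DETAIL
    SELECT CAST(ctimestamp1 AS timestamp with local time zone)
    FROM alltypesorc
    WHERE ctimestamp1 IS NOT NULL;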
2018-07-21T05:47:24,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:47:24,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp1 is not null
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:47:24,700 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:47:24,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:47:24,700 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cstring2, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null), FieldSchema(name:cboolean2, type:boolean, comment:null)], properties:null)
2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:47:24,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=1, getUniqueConstraints_(UniqueConstraintsRequest, )=1, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=19, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab); Time taken: 0.166 seconds
2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab): CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL
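Reformatted for readability, the statement being executed (identical to the command text above; the line breaks follow the DagInfo description later in this log):

    CREATE EXTERNAL TABLE druid_table_n3
    STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
    TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
    AS
    SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
           cstring1, cstring2, cdouble, cfloat, ctinyint,
           csmallint, cint, cbigint, cboolean1, cboolean2
    FROM alltypesorc WHERE ctimestamp1 IS NOT NULL;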
DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_table_n3 2018-07-21T05:47:24,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab 2018-07-21T05:47:24,701 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1 2018-07-21T05:47:24,701 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:24,702 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1 2018-07-21T05:47:24,702 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:MAPRED] in serial mode 2018-07-21T05:47:24,714 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found. id: hive_test_user: no such user id: hive_test_user: no such user at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?] at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?] 
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?] at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?] at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?] at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?] at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?] at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] 
at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] 
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:47:24,735 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-47-24_536_5880362027653075500-1
2018-07-21T05:47:24,735 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-47-24_536_5880362027653075500-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:47:24,736 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,736 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:47:24,736 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:47:24,736 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:47:24,736 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,736 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab
2018-07-21T05:47:24,736 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:47:24,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:47:24,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,737 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: CREATE EXTERNAL TABLE druid_table_n3 ...NULL (Stage-1)
2018-07-21T05:47:24,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\n\nCREATE EXTERNAL TABLE druid_table_n3\nSTORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'\nTBLPROPERTIES (\"druid.segment.granularity\" = \"HOUR\", \"druid.query.granularity\" = \"MINUTE\")\nAS\nSELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,\n cstring1,\n cstring2,\n cdouble,\n cfloat,\n ctinyint,\n csmallint,\n cint,\n cbigint,\n cboolean1,\n cboolean2\n FROM alltypesorc where ctimestamp1 IS NOT NULL"}
2018-07-21T05:47:24,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:47:24,737 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,739 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,739 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:47:24,740 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,740 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 2.97KB
2018-07-21T05:47:24,746 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10003
2018-07-21T05:47:24,746 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10003
2018-07-21T05:47:24,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10001
2018-07-21T05:47:24,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10002
2018-07-21T05:47:24,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,751 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,752 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-47-24_536_5880362027653075500-1
2018-07-21T05:47:24,753 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:47:24,753 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,753 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:47:24,755 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,755 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.34KB
2018-07-21T05:47:24,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:47:24,765 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,768 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:24,768 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0002, dagName=CREATE EXTERNAL TABLE druid_table_n3 ...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab }
2018-07-21T05:47:24,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Application not running, applicationId=application_1532175606211_0002, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0002/, diagnostics=Session timed out, lastDAGCompletionTime=1532176759923 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=1, successfulDAGs=1, failedDAGs=0, killedDAGs=0
2018-07-21T05:47:24,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Tez session was closed. Reopening...
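The records above show the first submit attempt failing: the YARN application backing the Tez session already FINISHED after idling past the 300000 ms session timeout, so Hive logs "Tez session was closed. Reopening...". A minimal sketch of that recover-and-resubmit pattern follows, assuming only the public Tez 0.9.x client API that appears in this log; ResubmitOnExpiredSession and reopenSession are hypothetical names standing in for Hive's actual TezTask/TezSessionPoolManager code, not its real implementation.

    import java.io.IOException;

    import org.apache.tez.client.TezClient;
    import org.apache.tez.dag.api.DAG;
    import org.apache.tez.dag.api.SessionNotRunning;
    import org.apache.tez.dag.api.TezConfiguration;
    import org.apache.tez.dag.api.TezException;
    import org.apache.tez.dag.api.client.DAGClient;

    public class ResubmitOnExpiredSession {

      // Hypothetical stand-in for Hive's TezSessionPoolManager.reopen(): tear
      // down the dead session as far as possible, then start a fresh one.
      static TezClient reopenSession(TezClient dead) throws TezException, IOException {
        try {
          dead.stop();
        } catch (SessionNotRunning ignored) {
          // Mirrors "Failed to shutdown Tez Session via proxy" below: the AM
          // is already gone, so the YARN app is killed out-of-band instead.
        }
        TezClient fresh = TezClient.create("HIVE-session", new TezConfiguration());
        fresh.start();
        return fresh;
      }

      static DAGClient submitWithRetry(TezClient session, DAG dag) throws TezException, IOException {
        try {
          return session.submitDAG(dag);
        } catch (SessionNotRunning e) {
          // The session's YARN application has FINISHED (it idled past the
          // reported 300000 ms timeout); reopen once and resubmit the DAG.
          return reopenSession(session).submitDAG(dag);
        }
      }
    }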
2018-07-21T05:47:24,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Closing Tez Session
2018-07-21T05:47:24,780 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Shutting down Tez Session, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0002
2018-07-21T05:47:24,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Application not running, applicationId=application_1532175606211_0002, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0002/, diagnostics=Session timed out, lastDAGCompletionTime=1532176759923 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=1, successfulDAGs=1, failedDAGs=0, killedDAGs=0
2018-07-21T05:47:24,783 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Failed to shutdown Tez Session via proxy
org.apache.tez.dag.api.SessionNotRunning: Application not running, applicationId=application_1532175606211_0002, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0002/, diagnostics=Session timed out, lastDAGCompletionTime=1532176759923 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=1, successfulDAGs=1, failedDAGs=0, killedDAGs=0
    at org.apache.tez.client.TezClientUtils.getAMProxy(TezClientUtils.java:901) ~[tez-api-0.9.1.jar:0.9.1]
    at org.apache.tez.client.TezClient.getAMProxy(TezClient.java:958) ~[tez-api-0.9.1.jar:0.9.1]
    at org.apache.tez.client.TezClient.stop(TezClient.java:641) [tez-api-0.9.1.jar:0.9.1]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.closeClient(TezSessionState.java:706) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.close(TezSessionState.java:673) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager.reopenInternal(TezSessionPoolManager.java:492) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager.reopen(TezSessionPoolManager.java:483) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.reopen(TezSessionState.java:931) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.getNewTezSessionOnError(TezTask.java:530) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.submit(TezTask.java:546) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:220) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:47:24,784 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Could not connect to AM, killing session via YARN, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0002
2018-07-21T05:47:24,784 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.YarnClientImpl: Killed application application_1532175606211_0002
2018-07-21T05:47:24,784 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service: org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state STOPPED
2018-07-21T05:47:24,785 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Attempting to clean up resources for ee745c13-27f8-4940-a347-c8307a2da8be: null
2018-07-21T05:47:24,785 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: User of session id ee745c13-27f8-4940-a347-c8307a2da8be is hiveptest
2018-07-21T05:47:24,786 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Setting resources to hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources; 1 additional files, 1 localized resources
2018-07-21T05:47:24,786 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:47:24,787 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:47:24,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:47:24,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:47:24,789 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar] is hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:47:24,789 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629385 for hdfs://localhost:35925/user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:47:24,790 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:47:24,790 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:47:24,791 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:47:24,791 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:47:24,792 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:47:24,792 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:47:24,793 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:47:24,794 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar] is hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:47:24,795 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629464 for hdfs://localhost:35925/user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.mb, mr initial value=10, tez(original):tez.runtime.io.sort.mb=24, tez(final):tez.runtime.io.sort.mb=24
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.read.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.read.timeout=null, tez(final):tez.runtime.shuffle.read.timeout=180000
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead.bytes, mr initial value=4194304, tez(original):tez.runtime.ifile.readahead.bytes=null, tez(final):tez.runtime.ifile.readahead.bytes=4194304
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.shuffle.ssl.enabled, mr initial value=false, tez(original):tez.runtime.shuffle.ssl.enable=null, tez(final):tez.runtime.shuffle.ssl.enable=false
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.sort.spill.percent, mr initial value=0.80, tez(original):tez.runtime.sort.spill.percent=null, tez(final):tez.runtime.sort.spill.percent=0.80
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead, mr initial value=true, tez(original):tez.runtime.ifile.readahead=null, tez(final):tez.runtime.ifile.readahead=true
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.merge.percent, mr initial value=0.66, tez(original):tez.runtime.shuffle.merge.percent=null, tez(final):tez.runtime.shuffle.merge.percent=0.66
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.parallelcopies, mr initial value=5, tez(original):tez.runtime.shuffle.parallel.copies=null, tez(final):tez.runtime.shuffle.parallel.copies=5
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.reduce.slowstart.completedmaps, mr initial value=0.05, tez(original):tez.shuffle-vertex-manager.min-src-fraction=null, tez(final):tez.shuffle-vertex-manager.min-src-fraction=0.05
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.memory.limit.percent, mr initial value=0.25, tez(original):tez.runtime.shuffle.memory.limit.percent=null, tez(final):tez.runtime.shuffle.memory.limit.percent=0.25
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.factor, mr initial value=10, tez(original):tez.runtime.io.sort.factor=null, tez(final):tez.runtime.io.sort.factor=10
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress, mr initial value=false, tez(original):tez.runtime.compress=null, tez(final):tez.runtime.compress=false
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.connect.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.connect.timeout=20000, tez(final):tez.runtime.shuffle.connect.timeout=20000
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.input.buffer.percent, mr initial value=0.0, tez(original):tez.runtime.task.input.post-merge.buffer.percent=null, tez(final):tez.runtime.task.input.post-merge.buffer.percent=0.0
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress.codec, mr initial value=org.apache.hadoop.io.compress.DefaultCodec, tez(original):tez.runtime.compress.codec=null, tez(final):tez.runtime.compress.codec=org.apache.hadoop.io.compress.DefaultCodec
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.merge.progress.records, mr initial value=10000, tez(original):tez.runtime.merge.progress.records=null, tez(final):tez.runtime.merge.progress.records=10000
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):map.sort.class, mr initial value=org.apache.hadoop.util.QuickSort, tez(original):tez.runtime.internal.sorter.class=null, tez(final):tez.runtime.internal.sorter.class=org.apache.hadoop.util.QuickSort
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.input.buffer.percent, mr initial value=0.70, tez(original):tez.runtime.shuffle.fetch.buffer.percent=0.4, tez(final):tez.runtime.shuffle.fetch.buffer.percent=0.4
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.maxtaskfailures.per.tracker, mr initial value=3, tez(original):tez.am.maxtaskfailures.per.node=null, tez(final):tez.am.maxtaskfailures.per.node=3
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.timeout, mr initial value=600000, tez(original):tez.task.timeout-ms=null, tez(final):tez.task.timeout-ms=600000
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.node-blacklisting.enable, mr initial value=false, tez(original):tez.am.node-blacklisting.enabled=false, tez(final):tez.am.node-blacklisting.enabled=false
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.counters.max, mr initial value=120, tez(original):tez.counters.max=1024, tez(final):tez.counters.max=1024
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.queuename, mr initial value=default, tez(original):tez.queue.name=default, tez(final):tez.queue.name=default
2018-07-21T05:47:24,813 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.task.listener.thread-count, mr initial value=30, tez(original):tez.am.task.listener.thread-count=null, tez(final):tez.am.task.listener.thread-count=30
2018-07-21T05:47:24,818 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Setting Tez Session access for sessionId=ee745c13-27f8-4940-a347-c8307a2da8be with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:47:24,820 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Tez Client Version: [ component=tez-api, version=0.9.1, revision=23b58b2b996eee255aab1a045412de00677ca2f1, SCM-URL=scm:git:https://git-wip-us.apache.org/repos/asf/tez.git, buildTime=2017-12-13T00:06:01Z ]
2018-07-21T05:47:24,820 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Opening new Tez Session (id: ee745c13-27f8-4940-a347-c8307a2da8be, scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be)
2018-07-21T05:47:24,820 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service: org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state INITED
2018-07-21T05:47:24,838 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.RMProxy: Connecting to ResourceManager at hive-ptest-slaves-a56.c.gcp-hive-upstream.internal/10.128.0.18:59658
2018-07-21T05:47:24,838 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl is started
2018-07-21T05:47:24,838 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Session mode. Starting session.
2018-07-21T05:47:24,838 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Using tez.lib.uris value from configuration: hdfs://localhost:35925/user/hiveptest/target/hive-tmpDir/TezAppJar.jar
2018-07-21T05:47:24,838 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Using tez.lib.uris.classpath value from configuration: null
2018-07-21T05:47:24,846 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Tez system stage directory hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003 doesn't exist and is created
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:47:24,856 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:47:24,856 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744546_3722, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/tez-conf.pb
2018-07-21T05:47:24,881 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/tez-conf.pb is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
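The long run of tez.TezSessionState "Config:" records above documents one translation rule applied property by property: when the Tez key already has a value (tez(original) is non-null, e.g. tez.runtime.io.sort.mb=24) it is kept; otherwise the MapReduce default is carried over as the final Tez value. A minimal sketch of that precedence rule, assuming only plain Hadoop Configuration; carryOver is a hypothetical helper for illustration, not the actual Hive/DagUtils API that produced these records.

    import org.apache.hadoop.conf.Configuration;

    public class MrToTezCarryOver {

      // tez(final) = tez(original) when set, else the mr initial value.
      static void carryOver(Configuration conf, String mrKey, String tezKey) {
        if (conf.get(tezKey) == null && conf.get(mrKey) != null) {
          conf.set(tezKey, conf.get(mrKey));
        }
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("mapreduce.task.io.sort.factor", "10"); // mr initial value=10
        conf.set("tez.runtime.io.sort.mb", "24");        // tez(original)=24
        carryOver(conf, "mapreduce.task.io.sort.factor", "tez.runtime.io.sort.factor");
        carryOver(conf, "mapreduce.task.io.sort.mb", "tez.runtime.io.sort.mb");
        // Prints 10 (carried over from MR) and 24 (existing Tez value kept),
        // matching the tez(final) values in the records above.
        System.out.println(conf.get("tez.runtime.io.sort.factor"));
        System.out.println(conf.get("tez.runtime.io.sort.mb"));
      }
    }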
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099]
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing.
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:47:24,885 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:47:24,885 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744547_3723, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/tez.session.local-resources.pb
2018-07-21T05:47:24,889 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/tez.session.local-resources.pb is closed by DFSClient_NONMAPREDUCE_680435605_1
2018-07-21T05:47:24,900 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.YarnClientImpl: Submitted application application_1532175606211_0003
2018-07-21T05:47:24,902 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: The url to track the Tez Session: http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0003/
2018-07-21T05:47:24,963 DEBUG [ApplicationMasterLauncher #4] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:47:24,963 DEBUG [ApplicationMasterLauncher #4] security.LlapServerSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:47:24,965 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0003_000001 (auth:SIMPLE)
2018-07-21T05:47:24,985 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0003
2018-07-21T05:47:25,131 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:47:25,131 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:47:25,131 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:47:25,131 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:47:25,131 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:47:25,134 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB
2018-07-21T05:47:25,138 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file
2018-07-21T05:47:25,138 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl
2018-07-21T05:47:25,138 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:47:25,138 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
2018-07-21T05:47:25,237 DEBUG [ContainersLauncher #3] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #3, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:47:26,240 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:26,240 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:31,990 INFO [Socket Reader #1 for port 60399] ipc.Server: Auth successful for appattempt_1532175606211_0003_000001 (auth:SIMPLE)
2018-07-21T05:47:32,091 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:38309
2018-07-21T05:47:32,093 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:38309
2018-07-21T05:47:32,095 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d000e with negotiated timeout 40000 for client /127.0.0.1:38309
2018-07-21T05:47:32,498 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Session re-established.
2018-07-21T05:47:32,498 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Session re-established.
2018-07-21T05:47:32,498 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0003, dagName=CREATE EXTERNAL TABLE druid_table_n3 ...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab }
2018-07-21T05:47:32,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:32,656 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing.
2018-07-21T05:47:32,657 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:32,657 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744548_3724, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/recovery/1/summary
2018-07-21T05:47:32,757 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/recovery/1/summary for DFSClient_NONMAPREDUCE_1237452931_1
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570]
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570]
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing.
2018-07-21T05:47:32,770 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:47:32,771 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744549_3725, replicas=127.0.0.1:52570, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/recovery/1/dag_1532175606211_0003_1.recovery
2018-07-21T05:47:32,791 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/recovery/1/dag_1532175606211_0003_1.recovery for DFSClient_NONMAPREDUCE_1237452931_1
2018-07-21T05:47:32,878 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0003, dagId=dag_1532175606211_0003_1, dagName=CREATE EXTERNAL TABLE druid_table_n3 ...NULL (Stage-1)
2018-07-21T05:47:32,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:32,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:32,878 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:33,887 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:33,887 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0003)
2018-07-21T05:47:33,904 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:47:35,493 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0003_000001 (auth:SIMPLE)
2018-07-21T05:47:35,505 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0003
2018-07-21T05:47:35,642 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs
2018-07-21T05:47:35,642 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl
2018-07-21T05:47:35,642 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:47:35,642 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem
2018-07-21T05:47:35,642 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null
2018-07-21T05:47:35,647 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB
2018-07-21T05:47:35,651 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file
2018-07-21T05:47:35,651 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl
2018-07-21T05:47:35,651 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class
2018-07-21T05:47:35,651 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
2018-07-21T05:47:35,698 DEBUG [ContainersLauncher #5] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #5, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:47:36,701 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:36,702 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:36,927 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1
2018-07-21T05:47:39,445 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:39,445 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:47:42,466 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1
2018-07-21T05:47:42,968 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:42,968 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0/1
2018-07-21T05:47:43,471 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:43,471 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
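The RenderStrategy$LogToFileFunction lines above report per-vertex progress as "completed(+running)/total", so "Map 1: 0(+1)/1" means zero succeeded tasks, one running, one total. A minimal sketch of how such a string can be derived from the public Tez DAGClient API; the render method below is illustrative, not Hive's actual monitoring code, and vertex ordering in the returned map is not guaranteed.

    import java.io.IOException;
    import java.util.EnumSet;
    import java.util.Map;

    import org.apache.tez.dag.api.TezException;
    import org.apache.tez.dag.api.client.DAGClient;
    import org.apache.tez.dag.api.client.DAGStatus;
    import org.apache.tez.dag.api.client.Progress;
    import org.apache.tez.dag.api.client.StatusGetOpts;

    public class VertexProgressSketch {

      static String render(DAGClient client) throws TezException, IOException {
        DAGStatus status = client.getDAGStatus(EnumSet.noneOf(StatusGetOpts.class));
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, Progress> e : status.getVertexProgress().entrySet()) {
          Progress p = e.getValue();
          sb.append(e.getKey()).append(": ").append(p.getSucceededTaskCount());
          if (p.getRunningTaskCount() > 0) {
            sb.append("(+").append(p.getRunningTaskCount()).append(')');
          }
          sb.append('/').append(p.getTotalTaskCount()).append(' ');
        }
        return sb.toString().trim(); // e.g. "Map 1: 0(+1)/1 Reducer 2: 0/1"
      }
    }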
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:47:44,504 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:47:44,504 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744550_3726, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/d8c61ee7b1cf459c8867e16311e70544/0_descriptor.json
2018-07-21T05:47:44,547 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/d8c61ee7b1cf459c8867e16311e70544/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30
2018-07-21T05:47:44,559 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:44,559 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:44,560 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:47:44,560 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:44,560 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:44,560 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:44,560 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:47:44,560 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099
2018-07-21T05:47:44,560 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744551_3727, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_47_24.628-07_00/0_index.zip
2018-07-21T05:47:44,566 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/d8c61ee7b1cf459c8867e16311e70544/0_index.zip is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625]
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570]
2018-07-21T05:47:44,587 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780
2018-07-21T05:47:44,587 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744552_3728, replicas=127.0.0.1:45625, 127.0.0.1:52570, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/segmentsDescriptorDir/default.druid_table_n3_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T054724.628-0700.json
2018-07-21T05:47:44,593 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/segmentsDescriptorDir/default.druid_table_n3_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T054724.628-0700.json is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780]
2018-07-21T05:47:45,207 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:45,207 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744553_3729, replicas=127.0.0.1:40780, 127.0.0.1:45625, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/11931240d96b42108eed49f4393605e6/0_index.zip
2018-07-21T05:47:45,240 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780]
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack").
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose.
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570, 127.0.0.1:40780]
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing.
2018-07-21T05:47:45,241 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:47:45,241 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744554_3730, replicas=127.0.0.1:40780, 127.0.0.1:52570, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/11931240d96b42108eed49f4393605e6/0_descriptor.json 2018-07-21T05:47:45,283 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/11931240d96b42108eed49f4393605e6/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30 2018-07-21T05:47:45,288 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/intermediateSegmentDir/default.druid_table_n3/11931240d96b42108eed49f4393605e6/0_index.zip is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30 2018-07-21T05:47:45,294 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:47:45,294 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:47:45,294 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:47:45,295 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:47:45,295 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:47:45,295 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:47:45,295 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:47:45,295 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:47:45,295 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744555_3731, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:40780 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/segmentsDescriptorDir/default.druid_table_n3_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T054724.628-0700.json 2018-07-21T05:47:45,300 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab/segmentsDescriptorDir/default.druid_table_n3_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T054724.628-0700.json is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:47:45,318 DEBUG [IPC Server handler 8 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:47:45,318 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744556_3732, replicas=127.0.0.1:33099, 127.0.0.1:45625, 127.0.0.1:52570 for /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10003/tmpstats-0_FS_3 2018-07-21T05:47:45,324 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10003/tmpstats-0_FS_3 is closed by DFSClient_attempt_15321756062111_0003_r_000000_0_-633576248_30 2018-07-21T05:47:45,371 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/recovery/1/dag_1532175606211_0003_1.recovery is closed by DFSClient_NONMAPREDUCE_1237452931_1 2018-07-21T05:47:45,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:45,385 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1 2018-07-21T05:47:45,385 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode 2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode 2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@38d7e9ec, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 
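The chooseRandom/excludeNodes lines above show HDFS's replica-placement loop: each of the three replicas is drawn at random from the rack's remaining live datanodes, and every node that already holds a replica is added to excludeNodes before the next draw. A minimal Java sketch of that exclusion pattern follows, for illustration only; the pickReplicas helper is hypothetical and is not Hadoop's actual BlockPlacementPolicy.

    import java.util.*;

    // Illustration of the chooseRandom/excludeNodes pattern seen in the DEBUG lines
    // above. Hypothetical helper; real placement lives in Hadoop's
    // BlockPlacementPolicyDefault and NetworkTopology classes.
    public class ReplicaPick {
        static List<String> pickReplicas(List<String> liveNodes, int replication, Random rnd) {
            Set<String> excluded = new HashSet<>();   // nodes that already hold a replica
            List<String> chosen = new ArrayList<>();
            while (chosen.size() < replication) {
                // "Choosing random from N available nodes ... excludeNodes=[...]"
                List<String> candidates = new ArrayList<>(liveNodes);
                candidates.removeAll(excluded);
                if (candidates.isEmpty()) {
                    break;                            // "No node to choose."
                }
                String node = candidates.get(rnd.nextInt(candidates.size()));
                chosen.add(node);                     // "chooseRandom returning <node>"
                excluded.add(node);
            }
            return chosen;
        }

        public static void main(String[] args) {
            List<String> nodes = Arrays.asList("127.0.0.1:45625", "127.0.0.1:52570",
                    "127.0.0.1:40780", "127.0.0.1:33099");
            System.out.println(pickReplicas(nodes, 3, new Random()));
        }
    }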
2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: MoveTask moving hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10002 to hdfs://localhost:35925/build/ql/test/data/warehouse/druid_table_n3
2018-07-21T05:47:45,393 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:45,393 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Moving data to directory hdfs://localhost:35925/build/ql/test/data/warehouse/druid_table_n3 from hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1/-ext-10002
2018-07-21T05:47:45,404 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:47:45,404 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode
2018-07-21T05:47:45,404 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_table_n3
2018-07-21T05:47:45,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.druid_table_n3 on null
2018-07-21T05:47:45,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:47:45,405 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2] types: [timestamp with local time zone('US/Pacific'), string, string, double, float, tinyint, smallint, int, bigint, boolean, boolean]
2018-07-21T05:47:45,406 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:47:45,407 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:47:45,407 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:47:45,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:45,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:45,407 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:45,409 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:45,410 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:47:45,418 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:47:45,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:47:45,419 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:47:45,419 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User
2018-07-21T05:47:45,420 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage].
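The RetryUtils warnings that follow back off at roughly 1.1s, 2.0s, 4.3s, and 7.4s, which is consistent with an exponential schedule with random jitter. Below is a self-contained Java sketch of that retry shape, assuming a one-second base delay doubled per attempt plus jitter; Druid's actual RetryUtils may compute the delay differently.

    import java.util.Random;
    import java.util.concurrent.Callable;

    // Illustrative retry-with-backoff in the spirit of the RetryUtils warnings below
    // ("Failed on try 1, retrying in 1,107ms", then ~2s, ~4s, ~7s). The
    // doubling-with-jitter schedule is an assumption, not Druid's exact formula.
    public class Backoff {
        static <T> T retry(Callable<T> task, int maxTries) throws Exception {
            Random rnd = new Random();
            for (int attempt = 1; ; attempt++) {
                try {
                    return task.call();
                } catch (Exception e) {
                    if (attempt >= maxTries) {
                        throw e;  // out of tries: rethrow the last failure
                    }
                    // base 1000ms, doubled each attempt, with up to 20% random jitter
                    long sleepMs = (long) (1000L * Math.pow(2, attempt - 1)
                            * (1 + 0.2 * rnd.nextDouble()));
                    System.err.printf("Failed on try %d, retrying in %,dms.%n", attempt, sleepMs);
                    Thread.sleep(sleepMs);
                }
            }
        }
    }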
2018-07-21T05:47:45,421 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 1, retrying in 1,107ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:47:46,531 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 2, retrying in 2,025ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    ... [stack trace and cause chain identical to try 1; omitted]
2018-07-21T05:47:48,559 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 3, retrying in 4,263ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] 
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:47:49,247 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:47:49,308 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
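Every retry in this run bottoms out in the same root cause: a TCP-level "Connection refused" from localhost:1527, the default Apache Derby Network Server port that the Druid metadata connector is pointed at (the comma in "1,527" is just locale-aware number formatting in the Derby client message). A minimal sketch, not part of this log, that reproduces the probe with plain JDK sockets; the host, port, and timeout are assumptions read off the exception message above, not the test configuration:

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

// Probes the endpoint the Derby client keeps failing against.
public class DerbyPortProbe {
    public static void main(String[] args) {
        try (Socket socket = new Socket()) {
            // "Connection refused" comes back immediately when nothing is
            // listening on the port, so a short connect timeout is plenty.
            socket.connect(new InetSocketAddress("localhost", 1527), 2_000);
            System.out.println("Derby Network Server is listening on 1527");
        } catch (IOException e) {
            // With no server running this prints:
            // probe failed: java.net.ConnectException: Connection refused
            System.out.println("probe failed: " + e);
        }
    }
}

If this probe fails the way the traces do, the Derby server the test setup was supposed to start is simply not running, and no amount of client-side retrying will fix it.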
2018-07-21T05:47:52,825 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 4, retrying in 7,361ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:47:54,111 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0003_000001 (auth:SIMPLE)
2018-07-21T05:47:54,134 WARN [ContainersLauncher #5] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0003_01_000002 is : 143
2018-07-21T05:47:54,135 DEBUG [ContainersLauncher #5] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #5, runnable type: java.util.concurrent.FutureTask
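The container exit code 143 logged above decomposes as 128 + 15 under the usual shell convention for a process killed by signal 15 (SIGTERM): the NodeManager tore the container down; it did not crash on its own. The arithmetic, spelled out:

// Exit codes above 128 conventionally mean "terminated by signal (code - 128)".
public class ExitCodeCheck {
    public static void main(String[] args) {
        int exitCode = 143;          // from the DefaultContainerExecutor WARN above
        int signal = exitCode - 128; // 15
        System.out.println("container killed by signal " + signal
                + (signal == 15 ? " (SIGTERM)" : ""));
    }
}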
2018-07-21T05:47:56,248 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,248 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,248 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,248 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,248 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,248 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,248 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,249 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,249 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,249 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,251 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:47:56,252 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:48:00,189 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 5, retrying in 23,426ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
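From here the exception chain repeats verbatim on each attempt; only the RetryUtils delay changes, growing roughly exponentially with noise (7,361 ms and 23,426 ms above, then 32,110 ms and 58,769 ms below). A sketch of fuzzy exponential backoff consistent with those delays; the constants and the fuzz model are assumptions chosen to match the log, not values read from the Druid RetryUtils source:

import java.util.Random;
import java.util.concurrent.Callable;

// Retries a task with exponential backoff plus Gaussian fuzz, in the spirit
// of the "Failed on try N, retrying in X ms" warnings in this log.
public class FuzzyBackoff {
    private static final Random RANDOM = new Random();

    static <T> T retry(Callable<T> task, int maxTries) throws Exception {
        for (int nTry = 1; ; nTry++) {
            try {
                return task.call();
            } catch (Exception e) {
                if (nTry >= maxTries) {
                    throw e;
                }
                long base = (long) (2_000 * Math.pow(2, nTry)); // 4s, 8s, 16s, ...
                long fuzz = (long) (base * 0.2 * RANDOM.nextGaussian());
                long sleepMillis = Math.min(60_000, Math.max(0, base + fuzz));
                // %,d prints with grouping separators, the same locale-aware
                // formatting that puts the commas in "23,426ms" and "port 1,527".
                System.out.printf("Failed on try %d, retrying in %,dms.%n", nTry, sleepMillis);
                Thread.sleep(sleepMillis);
            }
        }
    }
}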
2018-07-21T05:48:19,247 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:48:19,309 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:48:23,618 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 6, retrying in 32,110ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
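One layer up, the same refusal is what the Derby client driver rethrows as SQLNonTransientConnectionException before dbcp2 gives up on the pool. A JDBC-level version of the same check, assuming derbyclient.jar is on the classpath; the database name "druid" is a hypothetical placeholder, since the log never shows the actual connect URI:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

// Derby client URLs take the form jdbc:derby://<host>:<port>/<database>.
public class DerbyJdbcProbe {
    public static void main(String[] args) {
        String url = "jdbc:derby://localhost:1527/druid"; // database name is a guess
        try (Connection conn = DriverManager.getConnection(url)) {
            System.out.println("connected: " + conn.getMetaData().getDatabaseProductName());
        } catch (SQLException e) {
            // With no Network Server listening this fails exactly like the
            // traces above: ConnectException : Error connecting to server.
            System.out.println("JDBC probe failed: " + e);
        }
    }
}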
2018-07-21T05:48:49,247 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:48:49,309 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:48:55,731 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 7, retrying in 58,769ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] 
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
77 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more 2018-07-21T05:49:19,248 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:49:19,309 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:49:40,946 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@330453223 (XID = 195), (SESSIONID = 29), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:40,952 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:43,497 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@263841248 (XID = 178), (SESSIONID = 13), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:43,503 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:45,577 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@286061646 (XID = 180), (SESSIONID = 15), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:45,591 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@941292048 (XID = 1406), (SESSIONID = 45), (DATABASE = 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:49,248 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:49:49,309 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:49:49,876 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@201470823 (XID = 183), (SESSIONID = 17), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:49,881 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@157853678 (XID = 1408), (SESSIONID = 47), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:50,892 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1834463844 (XID = 166), (SESSIONID = 1), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:50,898 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1479869791 (XID = 1410), (SESSIONID = 49), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:53,086 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1606284223 (XID = 185), (SESSIONID = 19), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:53,092 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@505989370 (XID = 1412), (SESSIONID = 51), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:54,409 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@877721801 (XID = 170), (SESSIONID = 5), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:54,415 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1551612355 (XID = 1414), (SESSIONID = 53), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:54,503 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 8, retrying in 55,220ms. 
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] 
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
77 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 77 more 2018-07-21T05:49:54,678 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1146258794 (XID = 174), (SESSIONID = 9), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime) 2018-07-21T05:49:54,682 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1285790291 (XID = 1416), (SESSIONID = 55), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:49:54,913 INFO [pool-5-thread-1] NameNodeMetricsLog: >> Begin NameNode metrics dump 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:HttpPort=null 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:XceiverCount=3 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20... 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":253... 
2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:RpcPort=0 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:DataPort=0 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:SlowDisks=null 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0 2018-07-21T05:49:54,914 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=178908 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.15046296296296297 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=3 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0003_000001":1} 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.4652777777777778 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.port=60399 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:SentBytes=139321 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1538 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=3 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1538 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=28.0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumActiveNMs=2 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=3 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=3 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.Context=yarn 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=7010.0 2018-07-21T05:49:54,915 INFO [pool-5-thread-1] NameNodeMetricsLog: ClusterMetrics:NumLostNMs=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsCount=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeTotal=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsTotal=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointCount=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:ElapsedTime=619 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:PercentComplete=1.0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeCount=0 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImageCount=1 2018-07-21T05:49:54,916 INFO [pool-5-thread-1] NameNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,916 INFO 
[pool-5-thread-1] NameNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=18008 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumEncryptionZones=0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptBlocks=0 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000 2018-07-21T05:49:54,917 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumActiveClients=1 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=18008 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.HAState=active 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:FilesTotal=788 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ExcessBlocks=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalLoad=10 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumStaleStorages=0 2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BlocksTotal=704 
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsed=310917666
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=704
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityRemaining=157140075342
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=164526491102
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=239 71
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TotalSyncCount=14905
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=18008
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystem:CapacityRemainingGB=146.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@47addb01
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7...
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1559
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39248953626
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=59065605
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=4
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=6.0
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,918 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=12259455
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.1702970297029703
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=109
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.1495049504950495
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:SentBytes=4428474
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=27776
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=27776
2018-07-21T05:49:54,919 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=46
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=259.38776
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=908
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=408.87292
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=406
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:49:54,923 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.125
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=3
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=87
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=3
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=2
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=0.0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":270...
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=2
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=3
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=11
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=3
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=12
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=9
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:49:54,924 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@a5324c0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5...
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1496
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39248912666
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50073600
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":279...
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,925 INFO [pool-5-thread-1] NameNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=46
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=259.41162
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=516
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=408.87292
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=406
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:49:54,929 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=344993
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.3333333333333333
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=12
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=3.0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:SentBytes=3542
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=12
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=12
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=12
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=6
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=6
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=2.0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=5.0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:49:54,930 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3104
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SyncsNumOps=14904
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.14473684210526316
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.06
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.058333333333333334
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesCreated=3640
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=362
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetListingOps=35
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TotalFileOps=12035
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:AddBlockOps=2732
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:DeleteFileOps=1391
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TransactionsNumOps=18008
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8229
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:CreateFileOps=2732
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesRenamed=2030
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=704
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FileInfoOps=2392
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1032
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesDeleted=2853
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetBlockLocations=723
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=230008
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=1.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=7
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=6.0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:SentBytes=2083
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=7
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=7
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=7
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3...
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:DfsUsed=101594397
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:Remaining=39248855322
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:49:54,931 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1510
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3486
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1519
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=0.87
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=600
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2056
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=1584.625
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=194
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2045
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=2281611.409090909
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195848907
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5555555555555556
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=194
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3346
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3346
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100865053
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=194
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=28583.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=600
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2045
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.855
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=210
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5633
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2045
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=58028.18518518518
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=7652.75
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=12.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:XceiverCount=1
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-20...
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":265...
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:49:54,932 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentRemaining=46.613132
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentUsed=0.092228845
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:TotalBlocks=704
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=164526491102
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.092228845
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"us...
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"18008"}
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Free=157140075342
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=310917666
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Threads=603
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_A...
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Used=310917666
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hivepte...
2018-07-21T05:49:54,944 INFO [pool-5-thread-1] NameNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/t...
2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5... 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:DfsUsed=50073600 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:Remaining=39248806170 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1496 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@133f30bf 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1... 
2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1519 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39383015424 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100184064 2018-07-21T05:49:54,945 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=704 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: 
RpcActivityForPort59862:SentBytes=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:49:54,946 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:BlocksTotal=704 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:49:54-0700","windows":[{"ops":[],"windowLenMs":60000},{"ops":[{"opType":"rename (options=[TO_TRASH])... 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityUsed=310917666 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityRemaining=157140075342 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalSyncTimes=239 71 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FilesTotal=788 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: 
FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalSyncCount=14905 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:MaxObjects=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=3 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.46153846153846156 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=3.0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1531 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=0.0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=4 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=33 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2161 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.05405405405405406 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.25 
2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2392 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8229 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2732 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:49:54,948 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.8333333333333334 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1352 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.25 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=2.272727272727273 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2804 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=35 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=723 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=12 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=2.0 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.09090909090909091 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.2727272727272727 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.5454545454545454 2018-07-21T05:49:54,949 INFO 
[pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=7 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2732 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1409 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.0 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.45454545454545453 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=2400 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=51 2018-07-21T05:49:54,949 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.08375 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=46 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=259.46378 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCount=27 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=411.7371 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=406 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:49:54,953 
INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:LogFatal=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=7382 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.25 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=4 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:SentBytes=3964 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=20 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=20 2018-07-21T05:49:54,953 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemMaxM=1820.5 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsWaiting=46 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:54,957 INFO 
[pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=259.46378 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCount=27 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapUsedM=411.7371 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=406 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcTimeMillis=1971 2018-07-21T05:49:54,957 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3550 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] 
NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1510 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=0.76 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=600 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2049 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=34722.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=183 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2038 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1952077.5384615385 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95568293 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.18181818181818182 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] 
NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=183 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1807 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1807 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=102014337 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=183 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=8491.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=600 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] 
NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2038 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.725 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=212 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5616 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2038 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=25693.243243243243 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5264.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=1.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=1.6 2018-07-21T05:49:54,958 
INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=17 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6171 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.0 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:54,958 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=20 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:49:54,959 INFO 
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:49:54,959 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=46
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=259.46378
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=123
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCount=27
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=413.15613
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=406
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@168293b4
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3...
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1510
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39248691482
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=101594397
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:49:54,963 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=6820
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.4
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=3
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=1.6
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:SentBytes=3872
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=17
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=17
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=4
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=66.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=4
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:54,964 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-...
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=182319
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.07407407407407407
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=9
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.037037037037037
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:SentBytes=62175
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=95
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=95
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:49:54,965 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1...
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:DfsUsed=100184064
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:Remaining=39382851584
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1519
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:49:54,966 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=46
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=259.47433
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=833
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCount=27
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=414.57516
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=406
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=2
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=3
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=11
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=3
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=12
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=9
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:49:54,970 INFO [pool-5-thread-1] NameNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3625
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1559
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=0.96
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=600
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2081
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=38576.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=190
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2075
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=279951.76470588235
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=230
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52624181
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.9
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=190
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1170
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1170
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=59713967
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=190
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=11977.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=600
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2075
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.93
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=155
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5063
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2075
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5942.685714285714
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=4597.583333333333
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=1.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersKilled=5
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=6
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=56.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94833103
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=6
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=1
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:AvailableGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:CacheUsed=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:CacheCapacity=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:Capacity=84278861824
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7...
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:DfsUsed=59065605
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:Remaining=39248584986
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0
2018-07-21T05:49:54,971 INFO [pool-5-thread-1] NameNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1559
2018-07-21T05:49:54,972 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0
2018-07-21T05:49:54,972 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0
2018-07-21T05:49:54,972 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc
2018-07-21T05:49:54,972 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0
2018-07-21T05:49:54,972 INFO [pool-5-thread-1] NameNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:RenewalFailures=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:GetGroupsNumOps=13
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:tag.Context=ugi
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.666666666666666
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:tag.Context=mapred
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:49:54,973 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3545
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1496
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=0.875
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=600
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2043
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=55765.666666666664
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=187
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2035
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=282546.6538461539
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2912893
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.2857142857142857
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=187
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=413
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=413
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49383234
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=187
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=7679.642857142857
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=600
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2035
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.835
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4823
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2035
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=5254.5
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5233.214285714285
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=1.4
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:54,974 INFO [pool-5-thread-1] NameNodeMetricsLog: << End NameNode metrics dump
2018-07-21T05:49:55,338 INFO [pool-12-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1797"}]
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":253,"usedSpace":4326661,"freeSpace":19528515866,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":264,"usedSpace":54738944,"freeSpace":19662716928,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=178908
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.15046296296296297
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=3
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0003_000001":1}
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.4652777777777778
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=139321
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1538
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=3
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1538
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=28.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=3
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=3
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=7010.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=18008
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=18008
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=788
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=704
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=310917666
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=704
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157140075342
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=164526491102
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=239 71
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14905
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=18008
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=146.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:49:55,339 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@7075498
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1559
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39189864730
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=59065605
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=4
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=6.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=12259455
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.1702970297029703
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=109
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.1495049504950495
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=4428474
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=27776
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=27776
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=46
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=259.38776
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=908
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=408.87292
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=406
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.125
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=3
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=87
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=3
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=2
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=0.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}]
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":270,"usedSpace":4395008,"freeSpace":19661815808,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":270,"usedSpace":45678592,"freeSpace":19527590170,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=2
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=3
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=11
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=3
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=12
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=9
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:49:55,340 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@6deffd8c
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1496
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39188791578
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50073600
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}]
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":279,"usedSpace":56094502,"freeSpace":19661438976,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":250,"usedSpace":45499895,"freeSpace":19527213338,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=46
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=259.41162
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=516
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=408.87292
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=406
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:49:55,341 INFO
[pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=344993 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.3333333333333333 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=12 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=3.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=3542 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: 
RpcActivityForPort44235:RpcProcessingTimeNumOps=12 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=12 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=12 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=6 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=6 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=2.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=5.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3104 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14904 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.14473684210526316 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.06 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.058333333333333334 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3640 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: 
NameNodeActivity:WarmUpEDEKTimeNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=362 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=35 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=12035 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2732 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1391 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=18008 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0 2018-07-21T05:49:55,341 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8229 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2732 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2030 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=704 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2392 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1032 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: 
NameNodeActivity:FilesDeleted=2853 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=723 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=230008 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=1.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=7 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=6.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=2083 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=7 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=7 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=7 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] 
DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=101594397 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39248855322 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1510 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3486 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1519 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=0.87 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0 2018-07-21T05:49:55,342 INFO 
[pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2056 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=1584.625 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=194 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2045 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=2281611.409090909 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195848907 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5555555555555556 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0 
2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=194 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3346 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3346 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100865053 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=194 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=28583.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=600 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2045 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.855 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=210 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5633 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2045 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=58028.18518518518 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=7652.75 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=12.0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0 2018-07-21T05:49:55,342 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}] 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":265,"usedSpace":44896256,"freeSpace":19660529664,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":261,"usedSpace":55287808,"freeSpace":19660521472,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.613132 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.092228845 
2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=704 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=164526491102 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode= 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={} 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.092228845 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"usedSpace":100184064,"adminState":"In Service","nonDfsUsedSpace":41109106688,"capacity":84278861824,"numBlocks":526,"version":"3.1.0","used":100184064,"remaining":39385743360,"blockScheduled":0,"blockPoolUsed":100184064,"blockPoolUsedPercent":0.11887211,"volfails":0,"lastBlockReport":29},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":1,"usedSpace":50073600,"adminState":"In Service","nonDfsUsedSpace":41159217152,"capacity":84278861824,"numBlocks":539,"version":"3.1.0","used":50073600,"remaining":39251525914,"blockScheduled":1,"blockPoolUsed":50073600,"blockPoolUsedPercent":0.059414186,"volfails":0,"lastBlockReport":29},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":0,"usedSpace":59065605,"adminState":"In Service","nonDfsUsedSpace":41150470907,"capacity":84278861824,"numBlocks":516,"version":"3.1.0","used":59065605,"remaining":39251280154,"blockScheduled":1,"blockPoolUsed":59065605,"blockPoolUsedPercent":0.070083536,"volfails":0,"lastBlockReport":29},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":1,"usedSpace":101594397,"adminState":"In Service","nonDfsUsedSpace":41107696355,"capacity":84278861824,"numBlocks":528,"version":"3.1.0","used":101594397,"remaining":39251525914,"blockScheduled":1,"blockPoolUsed":101594397,"blockPoolUsedPercent":0.12054553,"volfails":0,"lastBlockReport":29}} 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:49:55,343 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"18008"} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157140075342 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=310917666 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=603 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: 
NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=310917666 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}] 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50073600 
2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39248806170 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1496 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] 
DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@249a9827 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1519 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39319609344 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100184064 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=704 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,344 
INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,344 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=704 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:49:55-0700","windows":[{"ops":[],"windowLenMs":60000},{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":112}],"totalCount":112},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":65}],"totalCount":65},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":6}],"totalCount":6}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"rename 
(options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1320}],"totalCount":1320},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"*","topUsers":[{"user":"hiveptest","count":10856}],"totalCount":10856},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1361}],"totalCount":1361},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2119}],"totalCount":2119},{"opType":"rename","topUsers":[{"user":"hiveptest","count":661}],"totalCount":661},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2038}],"totalCount":2038},{"opType":"create","topUsers":[{"user":"hiveptest","count":2649}],"totalCount":2649},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"open","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678}],"windowLenMs":1500000}]} 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=310917666 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157140075342 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=239 71 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=788 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14905 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: 
FSNamesystemState:MaxObjects=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=3 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.46153846153846156 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=3.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1531 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=0.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=4 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=33 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2161 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.05405405405405406 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.25 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2392 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8229 2018-07-21T05:49:55,345 INFO 
[pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2732 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.8333333333333334 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1352 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.25 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=2.272727272727273 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2804 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=35 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=723 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=12 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=2.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.09090909090909091 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.2727272727272727 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.5454545454545454 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=7 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2732 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1409 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] 
DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.45454545454545453 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=2400 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=51 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.08375 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=46 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=259.46378 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0 2018-07-21T05:49:55,345 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=411.7371 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=406 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: 
JvmMetrics-4:LogFatal=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=7382 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.25 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=4 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3964 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=20 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=20 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=46 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=259.46378 2018-07-21T05:49:55,346 INFO 
[pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=411.7371 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=406 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3550 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1510 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=0.76 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2049 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=34722.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=183 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2038 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1952077.5384615385 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95568293 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.18181818181818182 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=183 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1807 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1807 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=102014337 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=183 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=8491.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=600 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-52570:BlocksWritten=2038 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.725 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=212 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5616 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2038 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=25693.243243243243 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5264.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=1.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=1.6 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] 
DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=17 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6171 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=20 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0 2018-07-21T05:49:55,346 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: 
RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=46 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=259.46378 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=123 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=413.15613 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=406 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0 
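The JvmMetrics groups in this dump repeat once per daemon hosted by the test JVMs (tag.ProcessName=DataNode and NameNode above; the JvmMetrics-5 block around this point reports tag.ProcessName=NodeManager), and every record shares the shape `<timestamp> INFO [thread] DataNodeMetricsLog: Group:Key=value`. A minimal Python sketch for folding such a dump into per-group dictionaries; it assumes the records have first been split one per line, and the file name and function name are hypothetical, not part of any Hadoop API:

    from collections import defaultdict

    def parse_metrics_dump(lines):
        """Fold 'DataNodeMetricsLog: Group:Key=value' records into dicts."""
        metrics = defaultdict(dict)
        for line in lines:
            # Strip the "2018-07-21T05:49:55,347 INFO [pool-12-thread-1]" prefix.
            _, found, rest = line.partition("DataNodeMetricsLog: ")
            if not found:
                continue  # not a metrics record
            group, _, kv = rest.partition(":")  # e.g. "JvmMetrics-5"
            key, eq, value = kv.partition("=")  # e.g. "tag.ProcessName"
            if eq:
                metrics[group][key] = value.strip()
        return metrics

    # e.g. parse_metrics_dump(open("datanode-metrics.log"))["JvmMetrics-5"]["tag.ProcessName"]
    # should come back as "NodeManager" for the block around this point.

Splitting on the first ":" and first "=" keeps keys that legitimately contain spaces (such as "GcTimeMillisPS MarkSweep" above) and values that contain spaces or embedded JSON intact.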
2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@7d2a0c72 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'} 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1510 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39182274842 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=101594397 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: 
RpcActivityForPort48537:NumOpenConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: 
RpcActivityForPort38228:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=6820 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.4 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=3 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=1.6 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=3872 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=17 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=17 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: 
NodeManagerMetrics-1:ContainerUsedMemGB=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=4 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=66.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=4 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:49:55,347 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532177295524,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532177294511,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}] 
2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=182319 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.07407407407407407 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=9 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.037037037037037 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=62175 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=95 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=95 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100184064 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39382851584 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1519 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=46 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: 
JvmMetrics-3:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=259.47433 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=833 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=414.57516 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=406 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=2 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=3 
2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=11 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=3 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=12 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=9 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] 
DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3625 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1559 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=0.96 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2081 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=38576.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=190 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2075 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=279951.76470588235 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=230 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52624181 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.9 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=190 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0 2018-07-21T05:49:55,348 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1170 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1170 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=59713967 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=190 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=11977.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=600 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2075 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.93 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=155 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5063 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2075 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5942.685714285714 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0 
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=4597.583333333333 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=1.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=5 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=6 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=56.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94833103 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] 
DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=6 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=1 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=59065605 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39248584986 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: 
FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1559 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=13 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.666666666666666 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0 2018-07-21T05:49:55,349 INFO 
[pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3545 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1496 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=0.875 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2043 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=55765.666666666664 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=187 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2035 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0 2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0 2018-07-21T05:49:55,349 INFO 
[pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=282546.6538461539
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2912893
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.2857142857142857
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=187
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=413
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=413
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49383234
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=187
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=7679.642857142857
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=600
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2035
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.835
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4823
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2035
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=5254.5
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:55,349 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5233.214285714285
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=1.4
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:55,350 INFO [pool-12-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:49:55,679 INFO [pool-19-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}]
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":253,"usedSpace":4326661,"freeSpace":19556602138,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":264,"usedSpace":54738944,"freeSpace":19690819584,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=178908
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.15046296296296297
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=3
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0003_000001":1}
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.4652777777777778
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=139321
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1538
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=3
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1538
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=28.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=3
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=3
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=7010.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=18008
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=18008
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=788
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=704
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=310917666
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=704
2018-07-21T05:49:55,680 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157140075342
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=164526491102
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionECBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=239 71
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14905
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=18008
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=146.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@329dbf24
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1559
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39247380762
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=59065605
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=4
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=6.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=12259455
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.1702970297029703
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=109
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.1495049504950495
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:SentBytes=4428474
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=27776
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=27776
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=46
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=259.38776
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=908
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=408.87292
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=406
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.125
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=3
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=87
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=3
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=2
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=0.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}]
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":270,"usedSpace":4395008,"freeSpace":19690786816,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":270,"usedSpace":45678592,"freeSpace":19556569370,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:49:55,681 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=2
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=3
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=11
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=3
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=12
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=9
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@4350ce49
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1496
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39247339802
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50073600
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}]
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":279,"usedSpace":56094502,"freeSpace":19690778624,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":250,"usedSpace":45499895,"freeSpace":19556561178,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=46
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=259.41162
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=516
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=408.87292
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=406
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=344993
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.3333333333333333
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=12
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=3.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=3542
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=12
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=12
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=12
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=6
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=6
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=2.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=5.0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3104
2018-07-21T05:49:55,682 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14904
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.14473684210526316
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.06
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.058333333333333334
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3640
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=362
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=35
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=12035
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2732
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1391
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=18008
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8229
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2732
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2030
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=704
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2392
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1032
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=2853
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=723
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=230008
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=1.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=7
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=6.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=2083
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=7
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=7
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=7
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=101594397
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39248855322
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1510
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3486
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1519
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=0.87
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:49:55,683 INFO
[pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2056 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=1584.625 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=194 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2045 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=2281611.409090909 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195848907 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5555555555555556 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0 
2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=194 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3346 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3346 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100865053 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=194 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=28583.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=600 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2045 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.855 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=210 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5633 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2045 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=58028.18518518518 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=7652.75 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=12.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}] 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":265,"usedSpace":44896256,"freeSpace":19690741760,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":261,"usedSpace":55287808,"freeSpace":19690741760,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:49:55,683 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.613132 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.092228845 
2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=704 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=164526491102 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode= 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.092228845 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":1,"usedSpace":100184064,"adminState":"In Service","nonDfsUsedSpace":41109106688,"capacity":84278861824,"numBlocks":526,"version":"3.1.0","used":100184064,"remaining":39385743360,"blockScheduled":0,"blockPoolUsed":100184064,"blockPoolUsedPercent":0.11887211,"volfails":0,"lastBlockReport":29},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":1,"usedSpace":50073600,"adminState":"In Service","nonDfsUsedSpace":41159217152,"capacity":84278861824,"numBlocks":539,"version":"3.1.0","used":50073600,"remaining":39251525914,"blockScheduled":1,"blockPoolUsed":50073600,"blockPoolUsedPercent":0.059414186,"volfails":0,"lastBlockReport":29},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":1,"usedSpace":59065605,"adminState":"In Service","nonDfsUsedSpace":41150470907,"capacity":84278861824,"numBlocks":516,"version":"3.1.0","used":59065605,"remaining":39251280154,"blockScheduled":1,"blockPoolUsed":59065605,"blockPoolUsedPercent":0.070083536,"volfails":0,"lastBlockReport":29},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":1,"usedSpace":101594397,"adminState":"In Service","nonDfsUsedSpace":41107696355,"capacity":84278861824,"numBlocks":528,"version":"3.1.0","used":101594397,"remaining":39251525914,"blockScheduled":1,"blockPoolUsed":101594397,"blockPoolUsedPercent":0.12054553,"volfails":0,"lastBlockReport":29}} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"18008"} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157140075342 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=310917666 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=603 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: 
NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=310917666 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[] 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}] 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'} 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50073600 
2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39248806170 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0 2018-07-21T05:49:55,684 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1496 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] 
DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@4cafa5c3 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1519 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39381442560 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100184064 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=704 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,685 
INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=704 2018-07-21T05:49:55,685 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:49:55-0700","windows":[{"ops":[],"windowLenMs":60000},{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":112}],"totalCount":112},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":65}],"totalCount":65},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":6}],"totalCount":6}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"rename 
(options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1320}],"totalCount":1320},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"*","topUsers":[{"user":"hiveptest","count":10856}],"totalCount":10856},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1361}],"totalCount":1361},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2119}],"totalCount":2119},{"opType":"rename","topUsers":[{"user":"hiveptest","count":661}],"totalCount":661},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2038}],"totalCount":2038},{"opType":"create","topUsers":[{"user":"hiveptest","count":2649}],"totalCount":2649},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"open","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678}],"windowLenMs":1500000}]} 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0} 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=310917666 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157140075342 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=239 71 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=788 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14905 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: 
FSNamesystemState:MaxObjects=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=3 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.46153846153846156 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=3.0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1531 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=0.0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=4 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=33 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2161 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.05405405405405406 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.25 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2392 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4 2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8229 2018-07-21T05:49:55,686 INFO 
[pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2732
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.8333333333333334
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1352
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.25
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=2.272727272727273
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2804
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=35
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=723
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=12
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=2.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.09090909090909091
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.2727272727272727
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.5454545454545454
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=7
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2732
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1409
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.45454545454545453
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=2400
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=51
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.08375
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=46
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=259.46378
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=411.7371
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=406
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=7382
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.25
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=4
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3964
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=20
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=20
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=46
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=259.46378
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=411.7371
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=406
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3550
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1510
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=0.76
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=600
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2049
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=34722.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=183
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2038
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1952077.5384615385
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95568293
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.18181818181818182
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=183
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1807
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1807
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=102014337
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=183
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=8491.0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=600
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2038
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.725
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=212
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:49:55,686 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5616
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2038
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=25693.243243243243
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5264.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=1.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=1.6
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=17
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6171
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=20
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=46
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=259.46378
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=123
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=413.15613
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=406
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@1ff7e953
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1510
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39247118618
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=101594397
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=6820
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.4
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=3
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=1.6
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=3872
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=17
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=17
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=4
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=66.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=4
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:49:55,687 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532177295524,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532177294511,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}] 
2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=182319 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.07407407407407407 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=9 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.037037037037037 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=62175 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=95 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=95 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100184064 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39382851584 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1519 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=46 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: 
JvmMetrics-3:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=259.47433 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=833 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=414.57516 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=406 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=2 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=3 
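Every entry in these dumps follows one shape: a record name ("JvmMetrics-3", "QueueMetrics,q0=root,q1=default", "DataNodeVolume-<dir>"), a colon, then either a "tag.*" attribute or a metric name, then "=" and the value. A minimal sketch of folding such lines back into per-record maps — a hypothetical helper, not part of the Hive/Hadoop test harness, and it assumes each entry sits on its own line, the way the DataNodeMetricsLog logger originally emits them:

    import re
    from collections import defaultdict

    # Payload after the logger name; skip the ">> Begin"/"<< End" dump markers.
    PAYLOAD = re.compile(r"DataNodeMetricsLog: (?!>> Begin|<< End)(.+)$")

    def parse_dump(lines):
        """Fold `Record:Metric=value` entries into {record: {metric: value}}."""
        records = defaultdict(dict)
        for line in lines:
            m = PAYLOAD.search(line)
            if not m:
                continue
            record, _, rest = m.group(1).partition(":")  # record names hold no ':'
            metric, sep, value = rest.partition("=")     # metric names may hold spaces
            if sep:
                records[record][metric] = value
        return records

Splitting on the first ':' and then the first '=' of the remainder survives the awkward names seen above, e.g. "QueueMetrics,q0=root,q1=default" (its '=' signs sit left of the ':', inside the record part) and "GcTimeMillisPS MarkSweep" (a metric name containing a space).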
2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=11 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=3 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=12 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=9 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] 
DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3625 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1559 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=0.96 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2081 2018-07-21T05:49:55,688 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=38576.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=190 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2075 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=279951.76470588235 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=230 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52624181 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.9 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=190 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1170 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1170 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=59713967 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=190 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=11977.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=600 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2075 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.93 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=155 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5063 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2075 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5942.685714285714 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0 
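The DataNodeActivity-127.0.0.1-45625 counters above are raw totals, so per-operation averages have to be derived. A quick illustrative sanity check using values copied from this dump (rough numbers only, since BytesWritten and BytesRead aggregate all op mixes):

    # Values quoted from the DataNodeActivity-127.0.0.1-45625 record above.
    bytes_written = 59_713_967   # BytesWritten
    write_ops     = 2_075        # WriteBlockOpNumOps
    bytes_read    = 52_624_181   # BytesRead
    read_ops      = 190          # ReadBlockOpNumOps
    ack_rtt_ns    = 279_951.76   # PacketAckRoundTripTimeNanosAvgTime

    print(f"avg bytes per write block op: {bytes_written / write_ops:,.0f}")  # ~28,778
    print(f"avg bytes per read block op:  {bytes_read / read_ops:,.0f}")      # ~276,969
    print(f"avg packet-ack round trip:    {ack_rtt_ns / 1_000:,.1f} us")      # ~280.0 us

The small average write (~28 KB per write-block op, from 2,075 ops covering ~59.7 MB) is consistent with a qtest workload issuing many small writes rather than bulk data transfers.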
2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=4597.583333333333 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=1.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=5 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=6 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=56.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94833103 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] 
DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=6 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=1 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=59065605 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39248584986 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: 
FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1559 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=13 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.666666666666666 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0 2018-07-21T05:49:55,689 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3545 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1496 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=0.875 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=600 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2043 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=55765.666666666664 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=187 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2035 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0 2018-07-21T05:49:55,689 INFO 
[pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=282546.6538461539 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2912893 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.2857142857142857 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=187 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=413 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=413 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49383234 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=187 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-40780:BlocksVerified=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=7679.642857142857 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=600 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2035 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.835 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4823 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2035 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=5254.5 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5233.214285714285 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=1.4 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:55,689 INFO [pool-19-thread-1] DataNodeMetricsLog: << End DataNode metrics dump 2018-07-21T05:49:56,382 INFO 
[pool-33-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"1","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1798"}] 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":253,"usedSpace":4326661,"freeSpace":19556376858,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":264,"usedSpace":54738944,"freeSpace":19690594304,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=178908 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.15046296296296297 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=3 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0003_000001":1} 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: 
RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.4652777777777778 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=139321 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1538 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=3 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1538 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=28.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=3 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=3 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=7010.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0 
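Note that several daemons dump in parallel here — compare the writer threads pool-19-thread-1 and pool-33-thread-1 — and each dump also sweeps up NameNode- and YARN-side records (ClusterMetrics, StartupProgress, FSNamesystem), presumably because the test's mini cluster runs every daemon in a single JVM and the dump walks the whole shared metrics registry. In stock Hadoop the periodic dump is driven by the dedicated DataNodeMetricsLog log4j logger on a fixed period (if memory serves, the dfs.datanode.metrics.logger.period.seconds setting, defaulting to 600 s). A sketch, under those assumptions, for following one metric across successive dumps:

    def track(lines, record, metric):
        """Yield successive values of `record:metric` across repeated dumps."""
        needle = f"DataNodeMetricsLog: {record}:{metric}="
        for line in lines:
            i = line.find(needle)
            if i != -1:
                # Value runs to the next whitespace (end of line in an unwrapped log).
                tail = line[i + len(needle):].split(maxsplit=1)
                if tail:
                    yield tail[0]

For example, track(open("hive.log"), "FSNamesystem", "BlocksTotal") — the file name is hypothetical — shows whether the block count keeps climbing across dumps, a cheap way to spot tests that leak tables or files.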
2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=18008 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=18008 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0 2018-07-21T05:49:56,383 INFO 
[pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=788 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=704 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=310917666 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=704 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157140075342 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=164526491102 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: 
FSNamesystem:PendingDeletionECBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=239 71 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14905 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=18008 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=146.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0 2018-07-21T05:49:56,383 INFO [pool-33-thread-1] DataNodeMetricsLog: 
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0
2018-07-21T05:49:56,384 INFO [pool-33-thread-1] DataNodeMetricsLog:
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@199e5346
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'}
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1559
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39246930202
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=59065605
  FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0
  RpcDetailedActivityForPort45107:tag.port=45107
  RpcDetailedActivityForPort45107:StartContainersNumOps=4
  RpcDetailedActivityForPort45107:StopContainersNumOps=3
  RpcDetailedActivityForPort45107:tag.Context=rpcdetailed
  RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0
  RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0
  RpcDetailedActivityForPort45107:StartContainersAvgTime=6.0
  RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcDetailedActivityForPort45107:SignalToContainerNumOps=0
  RpcActivityForPort35925:ReceivedBytes=12259455
  RpcActivityForPort35925:RpcQueueTimeAvgTime=0.1702970297029703
  RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0
  RpcActivityForPort35925:RpcSlowCalls=0
  RpcActivityForPort35925:RpcAuthorizationSuccesses=109
  RpcActivityForPort35925:NumDroppedConnections=0
  RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1}
  RpcActivityForPort35925:RpcClientBackoff=0
  RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.1495049504950495
  RpcActivityForPort35925:RpcAuthorizationFailures=0
  RpcActivityForPort35925:NumOpenConnections=1
  RpcActivityForPort35925:tag.port=35925
  RpcActivityForPort35925:SentBytes=4428474
  RpcActivityForPort35925:RpcProcessingTimeNumOps=27776
  RpcActivityForPort35925:RpcAuthenticationSuccesses=0
  RpcActivityForPort35925:CallQueueLength=0
  RpcActivityForPort35925:tag.Context=rpc
  RpcActivityForPort35925:RpcAuthenticationFailures=0
  RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcActivityForPort35925:RpcQueueTimeNumOps=27776
  RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0
  JvmMetrics-1:MemMaxM=1820.5
  JvmMetrics-1:ThreadsTerminated=0
  JvmMetrics-1:ThreadsWaiting=46
  JvmMetrics-1:tag.SessionId=null
  JvmMetrics-1:GcTimeMillisPS MarkSweep=1174
  JvmMetrics-1:tag.Context=jvm
  JvmMetrics-1:MemNonHeapCommittedM=271.39844
  JvmMetrics-1:MemNonHeapUsedM=259.38776
  JvmMetrics-1:MemNonHeapMaxM=-1.0
  JvmMetrics-1:GcTotalExtraSleepTime=908
  JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  JvmMetrics-1:GcCount=27
  JvmMetrics-1:ThreadsBlocked=0
  JvmMetrics-1:LogWarn=0
  JvmMetrics-1:MemHeapCommittedM=1394.0
  JvmMetrics-1:ThreadsRunnable=151
  JvmMetrics-1:GcNumWarnThresholdExceeded=0
  JvmMetrics-1:LogInfo=0
  JvmMetrics-1:GcCountPS Scavenge=22
  JvmMetrics-1:ThreadsNew=0
  JvmMetrics-1:GcTimeMillisPS Scavenge=797
  JvmMetrics-1:MemHeapUsedM=408.87292
  JvmMetrics-1:ThreadsTimedWaiting=406
  JvmMetrics-1:GcCountPS MarkSweep=5
  JvmMetrics-1:MemHeapMaxM=1820.5
  JvmMetrics-1:LogError=0
  JvmMetrics-1:tag.ProcessName=DataNode
  JvmMetrics-1:LogFatal=0
  JvmMetrics-1:GcTimeMillis=1971
  JvmMetrics-1:GcNumInfoThresholdExceeded=0
  RpcDetailedActivityForPort59658:tag.port=59658
  RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.125
  RpcDetailedActivityForPort59658:SubmitApplicationNumOps=3
  RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
  RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
  RpcDetailedActivityForPort59658:GetApplicationReportNumOps=87
  RpcDetailedActivityForPort59658:GetNewApplicationNumOps=3
  RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
  RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=2
  RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=0.0
  DataNodeInfo-2:HttpPort=null
  DataNodeInfo-2:XceiverCount=3
  DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
  DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1799"}]
  DataNodeInfo-2:ClusterId=testClusterID
  DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":270,"usedSpace":4395008,"freeSpace":19690565632,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":270,"usedSpace":45678592,"freeSpace":19556348186,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
  DataNodeInfo-2:DatanodeHostname=127.0.0.1
  DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
  DataNodeInfo-2:RpcPort=0
  DataNodeInfo-2:DataPort=0
  DataNodeInfo-2:SoftwareVersion=3.1.0
  DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
  DataNodeInfo-2:SlowDisks=null
  DataNodeInfo-2:XmitsInProgress=0
  DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
  RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:49:56,385 INFO [pool-33-thread-1] DataNodeMetricsLog:
  RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
  RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  ECBlockGroupsState:MissingECBlockGroups=0
  ECBlockGroupsState:LowRedundancyECBlockGroups=0
  ECBlockGroupsState:BytesInFutureECBlockGroups=0
  ECBlockGroupsState:CorruptECBlockGroups=0
  ECBlockGroupsState:PendingDeletionECBlocks=0
  ECBlockGroupsState:TotalECBlockGroups=0
  QueueMetrics,q0=root:AMResourceLimitVCores=0
  QueueMetrics,q0=root:AppsCompleted=2
  QueueMetrics,q0=root:AggregateContainersPreempted=0
  QueueMetrics,q0=root:AllocatedContainers=1
  QueueMetrics,q0=root:AvailableVCores=15
  QueueMetrics,q0=root:AppsSubmitted=3
  QueueMetrics,q0=root:PendingMB=0
  QueueMetrics,q0=root:UsedCapacity=0.125
  QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
  QueueMetrics,q0=root:AvailableMB=896
  QueueMetrics,q0=root:ActiveApplications=0
  QueueMetrics,q0=root:AggregateContainersReleased=11
  QueueMetrics,q0=root:AppsKilled=0
  QueueMetrics,q0=root:AllocatedMB=128
  QueueMetrics,q0=root:tag.Context=yarn
  QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=3
  QueueMetrics,q0=root:ReservedContainers=0
  QueueMetrics,q0=root:AggregateContainersAllocated=12
  QueueMetrics,q0=root:PendingVCores=0
  QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=9
  QueueMetrics,q0=root:AllocatedVCores=1
  QueueMetrics,q0=root:ReservedMB=0
  QueueMetrics,q0=root:ReservedVCores=0
  QueueMetrics,q0=root:running_1440=0
  QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
  QueueMetrics,q0=root:AppsFailed=0
  QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
  QueueMetrics,q0=root:running_60=0
  QueueMetrics,q0=root:running_0=1
  QueueMetrics,q0=root:ActiveUsers=0
  QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
  QueueMetrics,q0=root:PendingContainers=0
  QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
  QueueMetrics,q0=root:UsedAMResourceVCores=0
  QueueMetrics,q0=root:AppsPending=0
  QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
  QueueMetrics,q0=root:tag.Queue=root
  QueueMetrics,q0=root:UsedAMResourceMB=0
  QueueMetrics,q0=root:AMResourceLimitMB=0
  QueueMetrics,q0=root:running_300=0
  QueueMetrics,q0=root:AppsRunning=1
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@62b4d65a
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1496
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39246897434
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50073600
  FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
  DataNodeInfo-1:HttpPort=null
  DataNodeInfo-1:XceiverCount=3
  DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
  DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1799"}]
  DataNodeInfo-1:ClusterId=testClusterID
  DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":279,"usedSpace":56094502,"freeSpace":19690553344,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":250,"usedSpace":45499895,"freeSpace":19556335898,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
  DataNodeInfo-1:DatanodeHostname=127.0.0.1
  DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
  DataNodeInfo-1:RpcPort=0
  DataNodeInfo-1:DataPort=0
  DataNodeInfo-1:SoftwareVersion=3.1.0
  DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
  DataNodeInfo-1:SlowDisks=null
  DataNodeInfo-1:XmitsInProgress=0
  DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
  MetricsSystem,sub=Stats:SnapshotNumOps=0
  MetricsSystem,sub=Stats:PublishNumOps=0
  MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
  MetricsSystem,sub=Stats:tag.Context=metricssystem
  MetricsSystem,sub=Stats:NumAllSources=63
  MetricsSystem,sub=Stats:DroppedPubAll=0
  MetricsSystem,sub=Stats:PublishAvgTime=0.0
  MetricsSystem,sub=Stats:NumActiveSources=63
  MetricsSystem,sub=Stats:NumAllSinks=0
  MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  MetricsSystem,sub=Stats:NumActiveSinks=0
  JvmMetrics-2:MemMaxM=1820.5
  JvmMetrics-2:ThreadsTerminated=0
  JvmMetrics-2:ThreadsWaiting=46
  JvmMetrics-2:tag.SessionId=null
  JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
  JvmMetrics-2:tag.Context=jvm
  JvmMetrics-2:MemNonHeapCommittedM=271.39844
  JvmMetrics-2:MemNonHeapUsedM=259.41162
  JvmMetrics-2:MemNonHeapMaxM=-1.0
  JvmMetrics-2:GcTotalExtraSleepTime=516
  JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  JvmMetrics-2:GcCount=27
  JvmMetrics-2:ThreadsBlocked=0
  JvmMetrics-2:LogWarn=0
  JvmMetrics-2:MemHeapCommittedM=1394.0
  JvmMetrics-2:ThreadsRunnable=151
  JvmMetrics-2:GcNumWarnThresholdExceeded=0
  JvmMetrics-2:LogInfo=0
  JvmMetrics-2:GcCountPS Scavenge=22
  JvmMetrics-2:ThreadsNew=0
  JvmMetrics-2:GcTimeMillisPS Scavenge=797
  JvmMetrics-2:MemHeapUsedM=408.87292
  JvmMetrics-2:ThreadsTimedWaiting=406
  JvmMetrics-2:GcCountPS MarkSweep=5
  JvmMetrics-2:MemHeapMaxM=1820.5
  JvmMetrics-2:LogError=0
  JvmMetrics-2:tag.ProcessName=DataNode
  JvmMetrics-2:LogFatal=0
  JvmMetrics-2:GcTimeMillis=1971
  JvmMetrics-2:GcNumInfoThresholdExceeded=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:49:56,386 INFO [pool-33-thread-1] DataNodeMetricsLog:
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
  DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcActivityForPort44235:ReceivedBytes=344993
  RpcActivityForPort44235:RpcQueueTimeAvgTime=0.3333333333333333
  RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
  RpcActivityForPort44235:RpcSlowCalls=0
  RpcActivityForPort44235:RpcAuthorizationSuccesses=12
  RpcActivityForPort44235:NumDroppedConnections=0
  RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
  RpcActivityForPort44235:RpcClientBackoff=0
  RpcActivityForPort44235:RpcProcessingTimeAvgTime=3.0
  RpcActivityForPort44235:RpcAuthorizationFailures=0
  RpcActivityForPort44235:NumOpenConnections=0
  RpcActivityForPort44235:tag.port=44235
  RpcActivityForPort44235:SentBytes=3542
  RpcActivityForPort44235:RpcProcessingTimeNumOps=12
  RpcActivityForPort44235:RpcAuthenticationSuccesses=12
  RpcActivityForPort44235:CallQueueLength=0
  RpcActivityForPort44235:tag.Context=rpc
  RpcActivityForPort44235:RpcAuthenticationFailures=0
  RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcActivityForPort44235:RpcQueueTimeNumOps=12
  RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
  RpcDetailedActivityForPort44235:tag.port=44235
  RpcDetailedActivityForPort44235:StartContainersNumOps=6
  RpcDetailedActivityForPort44235:StopContainersNumOps=6
  RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
  RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
  RpcDetailedActivityForPort44235:StopContainersAvgTime=2.0
  RpcDetailedActivityForPort44235:StartContainersAvgTime=5.0
  RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
  NameNodeActivity:TransactionsBatchedInSync=3104
  NameNodeActivity:SyncsNumOps=14904
  NameNodeActivity:SyncsAvgTime=0.14473684210526316
  NameNodeActivity:CacheReportNumOps=0
  NameNodeActivity:DeleteSnapshotOps=0
  NameNodeActivity:TransactionsAvgTime=0.06
  NameNodeActivity:AllowSnapshotOps=0
  NameNodeActivity:RenameSnapshotOps=0
  NameNodeActivity:ResourceCheckTimeAvgTime=0.058333333333333334
  NameNodeActivity:FilesCreated=3640
  NameNodeActivity:FilesAppended=0
  NameNodeActivity:CacheReportAvgTime=0.0
  NameNodeActivity:GetImageAvgTime=0.0
  NameNodeActivity:WarmUpEDEKTimeNumOps=0
  NameNodeActivity:ResourceCheckTimeNumOps=362
  NameNodeActivity:NumTimesReReplicationNotScheduled=0
  NameNodeActivity:SafeModeTime=0
  NameNodeActivity:GetListingOps=35
  NameNodeActivity:tag.ProcessName=NameNode
  NameNodeActivity:GetImageNumOps=0
  NameNodeActivity:TotalFileOps=12035
  NameNodeActivity:CreateSymlinkOps=0
  NameNodeActivity:AddBlockOps=2732
  NameNodeActivity:DisallowSnapshotOps=0
  NameNodeActivity:DeleteFileOps=1391
  NameNodeActivity:TransactionsNumOps=18008
  NameNodeActivity:CreateSnapshotOps=0
  NameNodeActivity:ListSnapshottableDirOps=0
  NameNodeActivity:BlockReceivedAndDeletedOps=8229
  NameNodeActivity:tag.SessionId=null
  NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
  NameNodeActivity:GetEditNumOps=0
  NameNodeActivity:FsImageLoadTime=128
  NameNodeActivity:CreateFileOps=2732
  NameNodeActivity:tag.Context=dfs
  NameNodeActivity:FilesRenamed=2030
  NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  NameNodeActivity:TimeoutReReplications=0
  NameNodeActivity:BlockOpsQueued=1
  NameNodeActivity:PutImageNumOps=0
  NameNodeActivity:PutImageAvgTime=0.0
  NameNodeActivity:FilesInGetListingOps=704
  NameNodeActivity:FileInfoOps=2392
  NameNodeActivity:GetLinkTargetOps=0
  NameNodeActivity:GetAdditionalDatanodeOps=0
  NameNodeActivity:BlockOpsBatched=1032
  NameNodeActivity:FilesDeleted=2853
  NameNodeActivity:SuccessfulReReplications=494
  NameNodeActivity:GetBlockLocations=723
  NameNodeActivity:FilesTruncated=0
  NameNodeActivity:SnapshotDiffReportOps=0
  NameNodeActivity:StorageBlockReportNumOps=8
  NameNodeActivity:GenerateEDEKTimeNumOps=0
  NameNodeActivity:GetEditAvgTime=0.0
  NameNodeActivity:StorageBlockReportAvgTime=0.25
  NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
  RpcActivityForPort45107:ReceivedBytes=230008
  RpcActivityForPort45107:RpcQueueTimeAvgTime=1.0
  RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
  RpcActivityForPort45107:RpcSlowCalls=0
  RpcActivityForPort45107:RpcAuthorizationSuccesses=7
  RpcActivityForPort45107:NumDroppedConnections=0
  RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
  RpcActivityForPort45107:RpcClientBackoff=0
  RpcActivityForPort45107:RpcProcessingTimeAvgTime=6.0
  RpcActivityForPort45107:RpcAuthorizationFailures=0
  RpcActivityForPort45107:NumOpenConnections=0
  RpcActivityForPort45107:tag.port=45107
  RpcActivityForPort45107:SentBytes=2083
  RpcActivityForPort45107:RpcProcessingTimeNumOps=7
  RpcActivityForPort45107:RpcAuthenticationSuccesses=7
  RpcActivityForPort45107:CallQueueLength=0
  RpcActivityForPort45107:tag.Context=rpc
  RpcActivityForPort45107:RpcAuthenticationFailures=0
  RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  RpcActivityForPort45107:RpcQueueTimeNumOps=7
  RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
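Every record in this dump has the shape Record:Key=Value, and the timed metrics come in pairs: a XxxNumOps counter next to a XxxAvgTime gauge (for example, RpcActivityForPort45107:RpcProcessingTimeNumOps=7 alongside RpcProcessingTimeAvgTime=6.0 above; note also that NameNodeActivity:TransactionsNumOps=18008 matches FSNamesystem:TransactionsSinceLastCheckpoint=18008 earlier in the dump). A minimal Python parsing sketch, hypothetical and not part of the test harness, with the sample lines copied from this log, that regroups such records and pairs the rate halves:

    from collections import defaultdict

    # One record per line, as in the dump above: "<Record>:<Key>=<Value>".
    SAMPLE = [
        "RpcActivityForPort45107:RpcProcessingTimeNumOps=7",
        "RpcActivityForPort45107:RpcProcessingTimeAvgTime=6.0",
        "RpcActivityForPort45107:ReceivedBytes=230008",
        "NameNodeActivity:TransactionsNumOps=18008",
        "NameNodeActivity:TransactionsAvgTime=0.06",
    ]

    def parse_record(line):
        # Split on the first ':' and the first '=' only, so record names such
        # as "QueueMetrics,q0=root" and values such as "localhost:35925"
        # survive intact.
        record, rest = line.split(":", 1)
        key, value = rest.split("=", 1)
        return record, key, value

    metrics = defaultdict(dict)
    for line in SAMPLE:
        record, key, value = parse_record(line)
        metrics[record][key] = value

    # Pair each XxxAvgTime gauge with its XxxNumOps counter.
    for record, kv in metrics.items():
        for key in (k for k in list(kv) if k.endswith("AvgTime")):
            ops = key[: -len("AvgTime")] + "NumOps"
            print(f"{record}: {ops}={kv.get(ops)} {key}={kv[key]}")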
2018-07-21T05:49:56,386 INFO [pool-33-thread-1] DataNodeMetricsLog:
  RpcDetailedActivityForPort38228:tag.port=38228
  RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
  RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  FSDatasetState:EstimatedCapacityLostTotal=0
  FSDatasetState:NumFailedVolumes=0
  FSDatasetState:CacheUsed=0
  FSDatasetState:CacheCapacity=0
  FSDatasetState:NumBlocksCached=0
  FSDatasetState:LastVolumeFailureDate=0
  FSDatasetState:Capacity=84278861824
  FSDatasetState:tag.Context=FSDatasetState
  FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
  FSDatasetState:DfsUsed=101594397
  FSDatasetState:Remaining=39248855322
  FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  FSDatasetState:NumBlocksFailedToCache=0
  FSDatasetState:NumBlocksFailedToUnCache=1510
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
  DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
  DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3486
  DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
  DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1519
  DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=0.87
  DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
  DataNodeActivity-127.0.0.1-33099:BlocksCached=0
  DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
  DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0
  DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=600
  DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2056
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
  DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
  DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=1584.625
  DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
  DataNodeActivity-127.0.0.1-33099:FsyncCount=0
  DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=194
  DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2045
  DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
  DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
  DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
  DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=2281611.409090909
  DataNodeActivity-127.0.0.1-33099:TotalReadTime=675
  DataNodeActivity-127.0.0.1-33099:BytesRead=195848907
  DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5555555555555556
  DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
  DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
  DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
  DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=194
  DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
  DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
  DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3346
  DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
  DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3346
  DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
  DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
  DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
  DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
  DataNodeActivity-127.0.0.1-33099:BytesWritten=100865053
  DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
  DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
  DataNodeActivity-127.0.0.1-33099:BlocksRead=194
  DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
  DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
  DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
  DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
  DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
  DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=28583.0
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
  DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
  DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=600
  DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
  DataNodeActivity-127.0.0.1-33099:BlocksWritten=2045
  DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.855
  DataNodeActivity-127.0.0.1-33099:TotalWriteTime=210
  DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
  DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
  DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
  DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
  DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5633
  DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2045
  DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=58028.18518518518
  DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
  DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
  DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
  DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=7652.75
  DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=12.0
  DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
  DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
  DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
  NameNodeStatus:LastHATransitionTime=0
  NameNodeStatus:HostAndPort=localhost:35925
  NameNodeStatus:State=active
  NameNodeStatus:SlowPeersReport=null
  NameNodeStatus:SlowDisksReport=null
  NameNodeStatus:NNRole=NameNode
  NameNodeStatus:BytesWithFutureGenerationStamps=0
  NameNodeStatus:SecurityEnabled=false
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog:
  DataNodeInfo:HttpPort=null
  DataNodeInfo:XceiverCount=1
  DataNodeInfo:SendPacketDownstreamAvgInfo=null
DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1799"}] 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":265,"usedSpace":44896256,"freeSpace":19690516480,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":261,"usedSpace":55287808,"freeSpace":19690516480,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0 2018-07-21T05:49:56,387 INFO [pool-33-thread-1] 
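[editor's note: the DataNodeInfo:VolumeInfo record above is a JSON map from volume directory to usage counters. As a minimal sketch (not part of the test run), it can be summarized offline like this in Python; the literal below is abridged to the two volumes and the fields shown in the record above, with the long ptest paths shortened.]

    import json

    # Abridged from the DataNodeInfo:VolumeInfo record above; paths shortened.
    volume_info = json.loads('''{
      "dfs/data/data1": {"numBlocks": 265, "usedSpace": 44896256, "freeSpace": 19690516480},
      "dfs/data/data2": {"numBlocks": 261, "usedSpace": 55287808, "freeSpace": 19690516480}
    }''')

    for path, v in volume_info.items():
        total = v["usedSpace"] + v["freeSpace"]
        print(f"{path}: {v['numBlocks']} blocks, "
              f"{v['usedSpace'] / 2**20:.1f} MiB used of {total / 2**30:.1f} GiB")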
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,387 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.613132
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.092228845
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=704
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=164526491102
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.092228845
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":2,"usedSpace":100184064,"adminState":"In Service","nonDfsUsedSpace":41109106688,"capacity":84278861824,"numBlocks":526,"version":"3.1.0","used":100184064,"remaining":39385743360,"blockScheduled":0,"blockPoolUsed":100184064,"blockPoolUsedPercent":0.11887211,"volfails":0,"lastBlockReport":29},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":2,"usedSpace":50073600,"adminState":"In Service","nonDfsUsedSpace":41159217152,"capacity":84278861824,"numBlocks":539,"version":"3.1.0","used":50073600,"remaining":39251525914,"blockScheduled":1,"blockPoolUsed":50073600,"blockPoolUsedPercent":0.059414186,"volfails":0,"lastBlockReport":29},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":1,"usedSpace":59065605,"adminState":"In Service","nonDfsUsedSpace":41150470907,"capacity":84278861824,"numBlocks":516,"version":"3.1.0","used":59065605,"remaining":39251280154,"blockScheduled":1,"blockPoolUsed":59065605,"blockPoolUsedPercent":0.070083536,"volfails":0,"lastBlockReport":29},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":2,"usedSpace":101594397,"adminState":"In Service","nonDfsUsedSpace":41107696355,"capacity":84278861824,"numBlocks":528,"version":"3.1.0","used":101594397,"remaining":39251525914,"blockScheduled":1,"blockPoolUsed":101594397,"blockPoolUsedPercent":0.12054553,"volfails":0,"lastBlockReport":29}}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"18008"}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=157140075342
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=310917666
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=603
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=310917666
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}]
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50073600
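[editor's note: the NameNodeInfo:LiveNodes record above carries one JSON object per live DataNode; all four transfer addresses are 127.0.0.1 ports, as expected for the miniature single-host HDFS cluster the qtest harness runs. A minimal sketch of reducing that blob to a per-node summary (Python; the literal is abridged to one node and the fields used):]

    import json

    # One node abridged from the NameNodeInfo:LiveNodes record above.
    live_nodes = json.loads('''{
      "127.0.0.1:33099": {"numBlocks": 526, "adminState": "In Service",
                          "blockPoolUsedPercent": 0.11887211, "lastContact": 2}
    }''')

    for addr, n in sorted(live_nodes.items()):
        print(f"{addr}: {n['numBlocks']} blocks, "
              f"{n['blockPoolUsedPercent']:.3f}% of block pool, "
              f"{n['adminState']}, lastContact={n['lastContact']}s")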
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39248806170
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1496
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@5743ab9a
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1519
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39381000192
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100184064
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=704
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,388 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=704
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:49:56-0700","windows":[{"ops":[],"windowLenMs":60000},{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":112}],"totalCount":112},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":65}],"totalCount":65},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":6}],"totalCount":6}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1320}],"totalCount":1320},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"*","topUsers":[{"user":"hiveptest","count":10856}],"totalCount":10856},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1361}],"totalCount":1361},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2119}],"totalCount":2119},{"opType":"rename","topUsers":[{"user":"hiveptest","count":661}],"totalCount":661},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2038}],"totalCount":2038},{"opType":"create","topUsers":[{"user":"hiveptest","count":2649}],"totalCount":2649},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"open","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678}],"windowLenMs":1500000}]}
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=310917666
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=157140075342
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=239 71
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=788
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14905
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
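[editor's note: the FSNamesystemState:TopUserOpCounts record above is the NameNode's top-users report over three rolling windows (windowLenMs 60000, 300000 and 1500000, i.e. 1, 5 and 25 minutes); every operation here is attributed to the single hiveptest user. A minimal sketch of ranking the op types within one window (Python; the literal is abridged from the 300000 ms window above, leaving out the "*" pseudo-op that aggregates all types):]

    import json

    # Abridged from the 300000 ms window of TopUserOpCounts above.
    window = json.loads('''{
      "windowLenMs": 300000,
      "ops": [
        {"opType": "getfileinfo", "totalCount": 65},
        {"opType": "mkdirs", "totalCount": 18},
        {"opType": "create", "totalCount": 11},
        {"opType": "open", "totalCount": 6},
        {"opType": "delete", "totalCount": 5}
      ]
    }''')

    for op in sorted(window["ops"], key=lambda o: o["totalCount"], reverse=True):
        print(f"{op['opType']:>12}: {op['totalCount']} calls "
              f"in the last {window['windowLenMs'] // 60000} min")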
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=3
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.46153846153846156
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=3.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1531
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=0.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=4
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=33
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2161
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.05405405405405406
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.25
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2392
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8229
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2732
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.8333333333333334
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1352
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.25
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=2.272727272727273
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2804
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=35
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=723
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=12
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=2.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.09090909090909091
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.2727272727272727
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.5454545454545454
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=7
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2732
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1409
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.45454545454545453
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=2400
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=51
2018-07-21T05:49:56,389 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.08375
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=46
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=259.46378
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=411.7371
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=406
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=7382
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.25
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=4
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3964
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=20
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=20
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=46
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=259.46378
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=411.7371
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=406
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3550
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1510
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=0.76
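[editor's note: each record in this dump has the shape <source>:<attribute>=<value> after the DataNodeMetricsLog: tag, so the whole dump can be folded back into a nested dict with one regex. A minimal sketch, assuming the records have been written one per line to a file (the name hive.log below is hypothetical); attribute names can contain spaces (GcTimeMillisPS Scavenge) and values can be whole JSON blobs, so the attribute is matched non-greedily up to the first '='.]

    import re
    from collections import defaultdict

    # Matches e.g. "... DataNodeMetricsLog: JvmMetrics-4:GcCount=27".
    PAT = re.compile(r'DataNodeMetricsLog: (?P<src>[^:]+):(?P<attr>.+?)=(?P<val>.*)$')

    metrics = defaultdict(dict)
    with open("hive.log") as f:  # hypothetical file holding this dump
        for line in f:
            m = PAT.search(line)
            if m:
                metrics[m.group("src")][m.group("attr")] = m.group("val")

    print(metrics["JvmMetrics-4"].get("GcCount"))  # -> "27"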
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=600
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2049
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=34722.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=183
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2038
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1952077.5384615385
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95568293
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.18181818181818182
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=183
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1807
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1807
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=102014337
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=183
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=8491.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=600
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2038
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.725
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=212
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5616
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2038
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=25693.243243243243
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5264.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=1.0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:56,390 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=1.6
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=17
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6171
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=20
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog:
RpcActivityForPort52446:CallQueueLength=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=46 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=259.46378 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=123 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=413.15613 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=406 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager 2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0 
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@e393c5c
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1510
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39246676250
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=101594397
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=6820
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.4
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=3
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=1.6
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=3872
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=17
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=17
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=4
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=66.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=4
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:49:56,391 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532177295524,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532177294511,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}]
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=182319
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.07407407407407407
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=9
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.037037037037037
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=62175
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=95
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=95
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100184064
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39382851584
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1519
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=46
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=259.47433
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=833
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=414.57516
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=406
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=2
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=3
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=11
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=3
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=12
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=9
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3625
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1559
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=0.96
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=600
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2081
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=38576.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0
2018-07-21T05:49:56,392 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=190
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2075
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=279951.76470588235
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=230
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52624181
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.9
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=190
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1170
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1170
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=59713967
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=190
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=11977.0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=600
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2075
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.93
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=155
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5063
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2075
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5942.685714285714
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=4597.583333333333 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=1.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=5 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=6 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=56.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94833103 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] 
DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=6 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=1 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=59065605 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39248584986 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: 
FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1559 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=13 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.666666666666666 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0 2018-07-21T05:49:56,393 INFO 
[pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3545 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1496 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=0.875 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=600 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2043 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=55765.666666666664 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=187 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2035 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0 2018-07-21T05:49:56,393 INFO 
[pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=282546.6538461539 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2912893 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.2857142857142857 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=187 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=413 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=413 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49383234 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=187 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: 
DataNodeActivity-127.0.0.1-40780:BlocksVerified=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=7679.642857142857 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=600 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2035 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.835 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4823 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2035 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=5254.5 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5233.214285714285 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=1.4 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:56,393 INFO [pool-33-thread-1] DataNodeMetricsLog: << End DataNode metrics dump 2018-07-21T05:49:56,943 INFO 
[pool-46-thread-1] DataNodeMetricsLog: >> Begin DataNode metrics dump 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:HttpPort=null 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XceiverCount=3 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SendPacketDownstreamAvgInfo=null 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"2","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1799"}] 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:ClusterId=testClusterID 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7":{"numBlocks":253,"usedSpace":4326661,"freeSpace":19512418586,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8":{"numBlocks":264,"usedSpace":54738944,"freeSpace":19646636032,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}} 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DatanodeHostname=127.0.0.1 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""} 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:RpcPort=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:DataPort=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SoftwareVersion=3.1.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:SlowDisks=null 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:XmitsInProgress=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-3:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"} 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:ReceivedBytes=178908 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeAvgTime=0.15046296296296297 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcSlowCalls=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationSuccesses=3 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumDroppedConnections=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.NumOpenConnectionsPerUser={"appattempt_1532175606211_0003_000001":1} 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcClientBackoff=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: 
RpcActivityForPort60399:RpcProcessingTimeAvgTime=0.4652777777777778 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthorizationFailures=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:NumOpenConnections=1 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.port=60399 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:SentBytes=139321 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcProcessingTimeNumOps=1538 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationSuccesses=3 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:CallQueueLength=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Context=rpc 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcAuthenticationFailures=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:RpcQueueTimeNumOps=1538 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort60399:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayAvgTime=28.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumShutdownNMs=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumActiveNMs=2 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumRebootedNMs=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayNumOps=3 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMLaunchDelayNumOps=3 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissioningNMs=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.ClusterMetrics=ResourceManager 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumDecommissionedNMs=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Context=yarn 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumUnhealthyNMs=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:AMRegisterDelayAvgTime=7010.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: ClusterMetrics:NumLostNMs=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsCount=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeTotal=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointPercentComplete=1.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageTotal=1 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsTotal=0 
2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointCount=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:ElapsedTime=619 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsPercentComplete=1.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingEditsElapsedTime=7 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointElapsedTime=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModePercentComplete=1.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:PercentComplete=1.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SavingCheckpointTotal=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeCount=0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:SafeModeElapsedTime=18 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageElapsedTime=57 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImageCount=1 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: StartupProgress:LoadingFsImagePercentComplete=1.0 2018-07-21T05:49:56,944 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDataNodeMessageCount=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LastWrittenTransactionId=18008 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceLiveDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:SnapshottableDirectories=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumEncryptionZones=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReconstructionBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LastCheckpointTime=1532175592000 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumActiveClients=1 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastLogRoll=18008 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumTimedOutPendingReconstructions=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:VolumeFailuresTotal=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ProvidedCapacityTotal=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingECBlockGroups=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:EstimatedCapacityLostTotal=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedGB=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionReplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecommissioningDataNodes=0 2018-07-21T05:49:56,945 INFO 
[pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.HAState=active 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingDeletionBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyECBlockGroups=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:FilesTotal=788 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ScheduledReplicationBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureECBlockGroups=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptECBlockGroups=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MillisSinceLastLoadedEdits=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalECBlockGroups=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ExcessBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalLoad=10 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumInMaintenanceDeadDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:ExpiredHeartbeats=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumStaleStorages=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BlocksTotal=704 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:UnderReplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsed=310917666 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalReplicatedBlocks=704 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:StaleDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumFilesUnderConstruction=1 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemaining=157140075342 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PostponedMisreplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityUsedNonDFS=164526491102 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LockQueueLength=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CorruptReplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BytesInFutureReplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Context=dfs 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumEnteringMaintenanceDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomDeadDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:Snapshots=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSNamesystem:PendingDeletionECBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:tag.TotalSyncTimes=239 71 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:PendingReplicationBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyReplicatedBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotal=337115447296 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplOneBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDeadDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TotalSyncCount=14905 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:TransactionsSinceLastCheckpoint=18008 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:MissingReplicationOneBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumLiveDataNodes=4 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:BlockCapacity=4194304 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityTotalGB=314.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:LowRedundancyBlocks=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:NumDecomLiveDataNodes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystem:CapacityRemainingGB=146.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalDataFileIos=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalFileIoErrors=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:DataFileIoRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:ReadIoRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FileIoErrorRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:WriteIoRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:FlushIoRateNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Context=dfs 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:TotalMetadataOperations=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:SyncIoRateAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:EstimatedCapacityLostTotal=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumFailedVolumes=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheCapacity=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:CacheUsed=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:FailedStorageLocations=[Ljava.lang.String;@330da16e 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksCached=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:LastVolumeFailureDate=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Capacity=84278861824 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToUncache=1559 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:Remaining=39159013658 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:DfsUsed=59065605 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-abcf8c77-f058-404d-990a-8a939ff08e3b:NumBlocksFailedToCache=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.port=45107 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersNumOps=4 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersNumOps=3 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Context=rpcdetailed 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StopContainersAvgTime=3.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:StartContainersAvgTime=6.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort45107:SignalToContainerNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:ReceivedBytes=12259455 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeAvgTime=0.1702970297029703 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcSlowCalls=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationSuccesses=109 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumDroppedConnections=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.NumOpenConnectionsPerUser={"hiveptest":1} 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcClientBackoff=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeAvgTime=0.1495049504950495 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthorizationFailures=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:NumOpenConnections=1 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.port=35925 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: 
RpcActivityForPort35925:SentBytes=4428474 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcProcessingTimeNumOps=27776 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationSuccesses=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:CallQueueLength=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Context=rpc 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcAuthenticationFailures=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:RpcQueueTimeNumOps=27776 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35925:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemMaxM=1820.5 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTerminated=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsWaiting=46 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.SessionId=null 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Context=jvm 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapUsedM=259.38776 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemNonHeapMaxM=-1.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTotalExtraSleepTime=908 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCount=27 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsBlocked=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogWarn=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapCommittedM=1394.0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsRunnable=151 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogInfo=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS Scavenge=22 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsNew=0 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:MemHeapUsedM=408.87292 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:ThreadsTimedWaiting=406 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcCountPS MarkSweep=5 2018-07-21T05:49:56,945 INFO [pool-46-thread-1] 
DataNodeMetricsLog: JvmMetrics-1:MemHeapMaxM=1820.5
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogError=0
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:tag.ProcessName=DataNode
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:LogFatal=0
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcTimeMillis=1971
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-1:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.port=59658
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportAvgTime=1.125
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationNumOps=3
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:SubmitApplicationAvgTime=1.0
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Context=rpcdetailed
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetApplicationReportNumOps=87
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationNumOps=3
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:GetNewApplicationAvgTime=0.0
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationNumOps=2
2018-07-21T05:49:56,945 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59658:ForceKillApplicationAvgTime=0.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:HttpPort=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XceiverCount=3
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1800"}]
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:ClusterId=testClusterID
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5":{"numBlocks":270,"usedSpace":4395008,"freeSpace":19646603264,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6":{"numBlocks":270,"usedSpace":45678592,"freeSpace":19512385818,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DatanodeHostname=127.0.0.1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:RpcPort=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:DataPort=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SoftwareVersion=3.1.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:SlowDisks=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:XmitsInProgress=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-2:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.port=59862
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Context=rpcdetailed
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:MissingECBlockGroups=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:LowRedundancyECBlockGroups=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:BytesInFutureECBlockGroups=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:CorruptECBlockGroups=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:PendingDeletionECBlocks=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: ECBlockGroupsState:TotalECBlockGroups=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitVCores=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsCompleted=2
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersPreempted=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedContainers=1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableVCores=15
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsSubmitted=3
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingMB=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedCapacity=0.125
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayAvgTime=0.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AvailableMB=896
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveApplications=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersReleased=11
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsKilled=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedMB=128
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Context=yarn
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateOffSwitchContainersAllocated=3
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedContainers=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateContainersAllocated=12
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingVCores=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateRackLocalContainersAllocated=9
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AllocatedVCores=1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedMB=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ReservedVCores=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_1440=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateVcoreSecondsPreempted=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsFailed=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateNodeLocalContainersAllocated=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_60=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_0=1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:ActiveUsers=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AbsoluteUsedCapacity=0.125
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:PendingContainers=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppAttemptFirstContainerAllocationDelayNumOps=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceVCores=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsPending=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AggregateMemoryMBSecondsPreempted=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:tag.Queue=root
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:UsedAMResourceMB=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AMResourceLimitMB=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:running_300=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root:AppsRunning=1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumFailedVolumes=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheCapacity=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:CacheUsed=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:FailedStorageLocations=[Ljava.lang.String;@40dda6b1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksCached=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:LastVolumeFailureDate=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Capacity=84278861824
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToUncache=1496
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:Remaining=39158972698
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:DfsUsed=50073600
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-425f2bd0-2dbf-418a-a2b5-a7e216df54e6:NumBlocksFailedToCache=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:HttpPort=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XceiverCount=3
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1800"}]
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:ClusterId=testClusterID
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3":{"numBlocks":279,"usedSpace":56094502,"freeSpace":19646595072,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4":{"numBlocks":250,"usedSpace":45499895,"freeSpace":19512377626,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":134217446}}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DatanodeHostname=127.0.0.1
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:RpcPort=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:DataPort=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SoftwareVersion=3.1.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:SlowDisks=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:XmitsInProgress=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo-1:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotNumOps=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishNumOps=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:SnapshotAvgTime=0.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Context=metricssystem
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSources=63
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:DroppedPubAll=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:PublishAvgTime=0.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSources=63
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumAllSinks=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: MetricsSystem,sub=Stats:NumActiveSinks=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemMaxM=1820.5
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTerminated=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsWaiting=46
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.SessionId=null
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Context=jvm
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapUsedM=259.41162
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTotalExtraSleepTime=516
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCount=27
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsBlocked=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogWarn=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsRunnable=151
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,946 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogInfo=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS Scavenge=22
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsNew=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapUsedM=408.87292
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:ThreadsTimedWaiting=406
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcCountPS MarkSweep=5
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:MemHeapMaxM=1820.5
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogError=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:tag.ProcessName=DataNode
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:LogFatal=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcTimeMillis=1971
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-2:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalDataFileIos=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalFileIoErrors=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:DataFileIoRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:FlushIoRateNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Context=dfs
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:TotalMetadataOperations=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:ReceivedBytes=344993
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeAvgTime=0.3333333333333333
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcSlowCalls=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationSuccesses=12
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumDroppedConnections=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcClientBackoff=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeAvgTime=3.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthorizationFailures=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:NumOpenConnections=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.port=44235
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:SentBytes=3542
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcProcessingTimeNumOps=12
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationSuccesses=12
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:CallQueueLength=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Context=rpc
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcAuthenticationFailures=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:RpcQueueTimeNumOps=12
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort44235:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.port=44235
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersNumOps=6
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersNumOps=6
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Context=rpcdetailed
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StopContainersAvgTime=2.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:StartContainersAvgTime=5.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort44235:SignalToContainerNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsBatchedInSync=3104
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsNumOps=14904
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SyncsAvgTime=0.14473684210526316
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteSnapshotOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsAvgTime=0.06
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:AllowSnapshotOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:RenameSnapshotOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeAvgTime=0.058333333333333334
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesCreated=3640
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesAppended=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CacheReportAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ResourceCheckTimeNumOps=362
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:NumTimesReReplicationNotScheduled=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SafeModeTime=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetListingOps=35
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.ProcessName=NameNode
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetImageNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TotalFileOps=12035
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSymlinkOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:AddBlockOps=2732
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DisallowSnapshotOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:DeleteFileOps=1391
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TransactionsNumOps=18008
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateSnapshotOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:ListSnapshottableDirOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockReceivedAndDeletedOps=8229
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.SessionId=null
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FsImageLoadTime=128
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:CreateFileOps=2732
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Context=dfs
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesRenamed=2030
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:TimeoutReReplications=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsQueued=1
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:PutImageAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesInGetListingOps=704
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FileInfoOps=2392
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetLinkTargetOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetAdditionalDatanodeOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:BlockOpsBatched=1032
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesDeleted=2853
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SuccessfulReReplications=494
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetBlockLocations=723
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:FilesTruncated=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:SnapshotDiffReportOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportNumOps=8
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GenerateEDEKTimeNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:GetEditAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:StorageBlockReportAvgTime=0.25
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeActivity:WarmUpEDEKTimeAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:ReceivedBytes=230008
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeAvgTime=1.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcSlowCalls=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationSuccesses=7
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumDroppedConnections=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcClientBackoff=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeAvgTime=6.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthorizationFailures=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:NumOpenConnections=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.port=45107
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:SentBytes=2083
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcProcessingTimeNumOps=7
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationSuccesses=7
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:CallQueueLength=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Context=rpc
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcAuthenticationFailures=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:RpcQueueTimeNumOps=7
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort45107:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.port=38228
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Context=rpcdetailed
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumFailedVolumes=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:CacheUsed=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:CacheCapacity=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksCached=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:LastVolumeFailureDate=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:Capacity=84278861824
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Context=FSDatasetState
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:DfsUsed=101594397
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:Remaining=39248855322
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToCache=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState:NumBlocksFailedToUnCache=1510
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksWrite=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosNumOps=3486
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRemoved=1519
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalAvgTime=0.87
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CacheReportsNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksCached=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsAvgTime=70.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DataNodeActiveXceiversCount=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsTotalNumOps=600
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsNumOps=2056
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesRead=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpAvgTime=1584.625
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionTasks=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncCount=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpNumOps=194
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromLocalClient=2045
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:VolumeFailures=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesRead=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksReadHits=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:PacketAckRoundTripTimeNanosAvgTime=2281611.409090909
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalReadTime=675
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesRead=195848907
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:IncrementalBlockReportsAvgTime=0.5555555555555556
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromRemoteClient=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksUncached=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:DatanodeNetworkErrors=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadsFromLocalClient=194
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksInPendingIBR=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockVerificationFailures=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosNumOps=3346
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBytesWrite=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosNumOps=3346
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.SessionId=null
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockReportsNumOps=1
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BytesWritten=100865053
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcFailedReconstructionTasks=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksRead=194
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReplicated=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Context=dfs
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksVerified=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WritesFromRemoteClient=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksGetLocalPathInfo=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketTransferNanosAvgTime=28583.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlockChecksumOpNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsNumOps=600
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksWritten=2045
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:HeartbeatsAvgTime=0.855
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:TotalWriteTime=210
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RamDiskBlocksEvicted=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcDecodingTimeNanos=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:LifelinesNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosNumOps=5633
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:WriteBlockOpNumOps=2045
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FlushNanosAvgTime=58028.18518518518
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:RemoteBytesWritten=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:FsyncNanosAvgTime=0.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionBytesWritten=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:SendDataPacketBlockedOnNetworkNanosAvgTime=7652.75
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:ReadBlockOpAvgTime=12.0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:CopyBlockOpNumOps=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:56,947 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-33099:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:LastHATransitionTime=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:HostAndPort=localhost:35925
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:State=active
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowPeersReport=null
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SlowDisksReport=null
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:NNRole=NameNode
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:BytesWithFutureGenerationStamps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeStatus:SecurityEnabled=false
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:HttpPort=null
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:XceiverCount=1
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SendPacketDownstreamAvgInfo=null
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:BPServiceActorInfo=[{"maxDataLength":"67108864","LastHeartbeat":"0","ActorState":"RUNNING","NamenodeAddress":"localhost:35925","BlockPoolID":"BP-2062454585-10.128.0.18-1532175592633","maxBlockReportSize":"0","LastBlockReport":"1800"}]
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:ClusterId=testClusterID
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:VolumeInfo={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1":{"numBlocks":265,"usedSpace":44896256,"freeSpace":19646558208,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0},"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2":{"numBlocks":261,"usedSpace":55287808,"freeSpace":19646558208,"reservedSpace":0,"storageType":"DISK","reservedSpaceForReplicas":0}}
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DatanodeHostname=127.0.0.1
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DiskBalancerStatus={"currentState":[],"result":"NO_PLAN","planID":"","planFile":""}
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:RpcPort=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:DataPort=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:SlowDisks=null
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:XmitsInProgress=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeInfo:NamenodeAddresses={"localhost":"BP-2062454585-10.128.0.18-1532175592633"}
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalDataFileIos=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalFileIoErrors=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:DataFileIoRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:FlushIoRateNumOps=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Context=dfs
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:TotalMetadataOperations=0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,948 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfSnapshottableDirs=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheCapacity=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CacheUsed=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:ProvidedCapacity=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DistinctVersionCount=1
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocksWithReplicationFactorOne=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentRemaining=46.532227
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Version=3.1.0, r16b70619a24cdcf5d3b0fcf4b58ca77238ccbe6d
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentUsed=0.092228845
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:TotalBlocks=704
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NonDfsUsedSpace=164799235550
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Safemode=
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:UpgradeFinalized=true
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:EnteringMaintenanceNodes={}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:PercentBlockPoolUsed=0.092228845
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:LiveNodes={"127.0.0.1:33099":{"infoAddr":"127.0.0.1:46305","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:33099","lastContact":0,"usedSpace":100184064,"adminState":"In Service","nonDfsUsedSpace":41200021504,"capacity":84278861824,"numBlocks":526,"version":"3.1.0","used":100184064,"remaining":39294828544,"blockScheduled":0,"blockPoolUsed":100184064,"blockPoolUsedPercent":0.11887211,"volfails":0,"lastBlockReport":30},"127.0.0.1:40780":{"infoAddr":"127.0.0.1:43939","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:40780","lastContact":0,"usedSpace":50073600,"adminState":"In Service","nonDfsUsedSpace":41250131968,"capacity":84278861824,"numBlocks":539,"version":"3.1.0","used":50073600,"remaining":39160611098,"blockScheduled":1,"blockPoolUsed":50073600,"blockPoolUsedPercent":0.059414186,"volfails":0,"lastBlockReport":30},"127.0.0.1:45625":{"infoAddr":"127.0.0.1:50043","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:45625","lastContact":2,"usedSpace":59065605,"adminState":"In Service","nonDfsUsedSpace":41150470907,"capacity":84278861824,"numBlocks":516,"version":"3.1.0","used":59065605,"remaining":39251280154,"blockScheduled":1,"blockPoolUsed":59065605,"blockPoolUsedPercent":0.070083536,"volfails":0,"lastBlockReport":29},"127.0.0.1:52570":{"infoAddr":"127.0.0.1:42302","infoSecureAddr":"127.0.0.1:0","xferaddr":"127.0.0.1:52570","lastContact":0,"usedSpace":101594397,"adminState":"In Service","nonDfsUsedSpace":41198611171,"capacity":84278861824,"numBlocks":528,"version":"3.1.0","used":101594397,"remaining":39160611098,"blockScheduled":1,"blockPoolUsed":101594397,"blockPoolUsedPercent":0.12054553,"volfails":0,"lastBlockReport":30}}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CompileInfo=2018-04-03T04:00Z by wtan from branch-3.1.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:ClusterId=testClusterID
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolId=BP-2062454585-10.128.0.18-1532175592633
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DeadNodes={}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:JournalTransactionInfo={"MostRecentCheckpointTxId":"0","LastAppliedOrWrittenTxId":"18008"}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Free=156867330894
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:BlockPoolUsedSpace=310917666
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Threads=603
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NumberOfMissingBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirStatuses={"active":{"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":"IMAGE_AND_EDITS","/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":"IMAGE_AND_EDITS"},"failed":{}}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:SoftwareVersion=3.1.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Used=310917666
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:Total=337115447296
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NodeUsage={"nodeUsage":{"min":"0.06%","median":"0.12%","max":"0.12%","stdDev":"0.03%"}}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NNStartedTimeInMillis=1532175593903
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:DecomNodes={}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:CorruptFiles=[]
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameDirSize={"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1":646,"/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2":646}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: NameNodeInfo:NameJournalStatus=[{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-1/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"},{"manager":"FileJournalManager(root=/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2)","stream":"EditLogFileOutputStream(/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/name-0-2/current/edits_inprogress_0000000000000000001)","disabled":"false","required":"false"}]
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumFailedVolumes=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheUsed=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:CacheCapacity=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksCached=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:LastVolumeFailureDate=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:Capacity=84278861824
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Context=FSDatasetState
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6]'}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:DfsUsed=50073600
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:Remaining=39248806170
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToCache=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-2:NumBlocksFailedToUnCache=1496
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:ReceivedBytes=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcSlowCalls=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumDroppedConnections=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcClientBackoff=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthorizationFailures=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:NumOpenConnections=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.port=35555
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:SentBytes=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:CallQueueLength=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Context=rpc
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcAuthenticationFailures=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort35555:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumFailedVolumes=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheCapacity=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:CacheUsed=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:FailedStorageLocations=[Ljava.lang.String;@751f543b
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksCached=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:LastVolumeFailureDate=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Capacity=84278861824
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToUncache=1519
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:Remaining=39293075456
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:DfsUsed=100184064
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-8f95c842-a0c9-4e85-941e-02356267dd5f:NumBlocksFailedToCache=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalDataFileIos=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalFileIoErrors=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:DataFileIoRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:FlushIoRateNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Context=dfs
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:TotalMetadataOperations=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:PendingDeletionReplicatedBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicationOneBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:LowRedundancyReplicatedBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:CorruptReplicatedBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:MissingReplicatedBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:BytesInFutureReplicatedBlocks=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: ReplicatedBlocksState:TotalReplicatedBlocks=704
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:ReceivedBytes=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcSlowCalls=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumDroppedConnections=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcClientBackoff=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthorizationFailures=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:NumOpenConnections=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.port=59862
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:SentBytes=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:CallQueueLength=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Context=rpc
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcAuthenticationFailures=0
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,949 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59862:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:BlocksTotal=704
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TopUserOpCounts={"timestamp":"2018-07-21T05:49:56-0700","windows":[{"ops":[],"windowLenMs":60000},{"ops":[{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":1}],"totalCount":1},{"opType":"*","topUsers":[{"user":"hiveptest","count":112}],"totalCount":112},{"opType":"delete","topUsers":[{"user":"hiveptest","count":5}],"totalCount":5},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":65}],"totalCount":65},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":18}],"totalCount":18},{"opType":"create","topUsers":[{"user":"hiveptest","count":11}],"totalCount":11},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":2}],"totalCount":2},{"opType":"open","topUsers":[{"user":"hiveptest","count":6}],"totalCount":6}],"windowLenMs":300000},{"ops":[{"opType":"getEZForPath","topUsers":[{"user":"hiveptest","count":12}],"totalCount":12},{"opType":"rename (options=[TO_TRASH])","topUsers":[{"user":"hiveptest","count":1320}],"totalCount":1320},{"opType":"listStatus","topUsers":[{"user":"hiveptest","count":16}],"totalCount":16},{"opType":"*","topUsers":[{"user":"hiveptest","count":10856}],"totalCount":10856},{"opType":"delete","topUsers":[{"user":"hiveptest","count":1361}],"totalCount":1361},{"opType":"getfileinfo","topUsers":[{"user":"hiveptest","count":2119}],"totalCount":2119},{"opType":"rename","topUsers":[{"user":"hiveptest","count":661}],"totalCount":661},{"opType":"mkdirs","topUsers":[{"user":"hiveptest","count":2038}],"totalCount":2038},{"opType":"create","topUsers":[{"user":"hiveptest","count":2649}],"totalCount":2649},{"opType":"setPermission","topUsers":[{"user":"hiveptest","count":4}],"totalCount":4},{"opType":"open","topUsers":[{"user":"hiveptest","count":678}],"totalCount":678}],"windowLenMs":1500000}]}
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:SnapshotStats={"SnapshottableDirectories":0,"Snapshots":0}
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:UnderReplicatedBlocks=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceLiveDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityUsed=310917666
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEncryptionZones=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityRemaining=156867330894
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReconstructionBlocks=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumEnteringMaintenanceDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomDeadDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FSState=Operational
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:VolumeFailuresTotal=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:ProvidedCapacityTotal=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecommissioningDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingReplicationBlocks=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncTimes=239 71 
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:PendingDeletionBlocks=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:CapacityTotal=337115447296
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FilesTotal=788
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:ScheduledReplicationBlocks=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDeadDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalSyncCount=14905
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:FsLockQueueLength=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:MaxObjects=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:TotalLoad=10
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumInMaintenanceDeadDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumLiveDataNodes=4
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:BlockDeletionStartTime=1532175593903
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:LowRedundancyBlocks=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumDecomLiveDataNodes=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: FSNamesystemState:NumStaleStorages=0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.port=60399
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterNumOps=3
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateAvgTime=0.46153846153846156
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Context=rpcdetailed
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:RegisterApplicationMasterAvgTime=3.0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:AllocateNumOps=1531
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterAvgTime=0.0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort60399:FinishApplicationMasterNumOps=4
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsNumOps=33
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportNumOps=1
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameAvgTime=0.18454258675078863
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsNumOps=2161
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionAvgTime=0.0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetServerDefaultsAvgTime=0.0
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedAvgTime=0.05405405405405406
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoAvgTime=0.25
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetFileInfoNumOps=2392
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestNumOps=4
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReceivedAndDeletedNumOps=8229
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateNumOps=2732
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.port=35925
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeNumOps=4
2018-07-21T05:49:56,950 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportAvgTime=36.75
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsAvgTime=0.8333333333333334
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2NumOps=1352
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:Rename2AvgTime=0.25
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Context=rpcdetailed
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenameNumOps=678
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetDatanodeReportAvgTime=112.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathNumOps=13
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteAvgTime=2.272727272727273
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetEZForPathAvgTime=0.25
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteNumOps=2804
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingNumOps=35
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetBlockLocationsNumOps=723
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncNumOps=12
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:BlockReportNumOps=4
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:GetListingAvgTime=2.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CompleteAvgTime=0.09090909090909091
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:VersionRequestAvgTime=42.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RegisterDatanodeAvgTime=8.25
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:CreateAvgTime=1.2727272727272727
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockAvgTime=0.5454545454545454
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SetPermissionNumOps=7
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:AddBlockNumOps=2732
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:DeleteNumOps=1409
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:FsyncAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:MkdirsAvgTime=0.45454545454545453
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatNumOps=2400
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:RenewLeaseNumOps=51
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35925:SendHeartbeatAvgTime=0.08375
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemMaxM=1820.5
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTerminated=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsWaiting=46
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.SessionId=null
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Context=jvm
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapUsedM=259.46378
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTotalExtraSleepTime=681
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCount=27
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsBlocked=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogWarn=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsRunnable=151
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogInfo=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS Scavenge=22
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsNew=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapUsedM=411.7371
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:ThreadsTimedWaiting=406
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcCountPS MarkSweep=5
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:MemHeapMaxM=1820.5
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogError=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:tag.ProcessName=DataNode
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:LogFatal=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcTimeMillis=1971
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-4:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:ReceivedBytes=7382
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeAvgTime=0.25
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcSlowCalls=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationSuccesses=4
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumDroppedConnections=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcClientBackoff=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeAvgTime=1.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthorizationFailures=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:NumOpenConnections=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.port=53037
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:SentBytes=3964
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcProcessingTimeNumOps=20
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:CallQueueLength=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Context=rpc
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcAuthenticationFailures=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:RpcQueueTimeNumOps=20
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort53037:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemMaxM=1820.5
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTerminated=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsWaiting=46
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.SessionId=null
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Context=jvm
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapUsedM=259.46378
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTotalExtraSleepTime=825
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCount=27
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsBlocked=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogWarn=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsRunnable=151
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogInfo=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS Scavenge=22
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsNew=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapUsedM=411.7371
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:ThreadsTimedWaiting=406
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcCountPS MarkSweep=5
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:MemHeapMaxM=1820.5
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogError=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:tag.ProcessName=NameNode
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:LogFatal=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcTimeMillis=1971
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksWrite=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosNumOps=3550
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRemoved=1510
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalAvgTime=0.76
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CacheReportsNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksCached=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsAvgTime=70.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DataNodeActiveXceiversCount=1
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsTotalNumOps=600
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsNumOps=2049
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesRead=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpAvgTime=34722.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionTasks=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncCount=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpNumOps=183
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromLocalClient=2038
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:VolumeFailures=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesRead=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksReadHits=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:PacketAckRoundTripTimeNanosAvgTime=1952077.5384615385
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalReadTime=287
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesRead=95568293
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:IncrementalBlockReportsAvgTime=0.18181818181818182
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromRemoteClient=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksUncached=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:DatanodeNetworkErrors=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadsFromLocalClient=183
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksInPendingIBR=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockVerificationFailures=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosNumOps=1807
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBytesWrite=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosNumOps=1807
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.SessionId=null
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockReportsNumOps=1
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BytesWritten=102014337
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcFailedReconstructionTasks=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksRead=183
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReplicated=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Context=dfs
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksVerified=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WritesFromRemoteClient=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksGetLocalPathInfo=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketTransferNanosAvgTime=8491.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlockChecksumOpNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsNumOps=600
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksWritten=2038
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:HeartbeatsAvgTime=0.725
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:TotalWriteTime=212
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RamDiskBlocksEvicted=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcDecodingTimeNanos=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:LifelinesNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosNumOps=5616
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:WriteBlockOpNumOps=2038
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FlushNanosAvgTime=25693.243243243243
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:RemoteBytesWritten=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:FsyncNanosAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionBytesWritten=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:SendDataPacketBlockedOnNetworkNanosAvgTime=5264.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:ReadBlockOpAvgTime=1.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:CopyBlockOpNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-52570:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalDataFileIos=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:MetadataOperationRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalFileIoErrors=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:DataFileIoRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:FlushIoRateNumOps=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Context=dfs
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:TotalMetadataOperations=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data6:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.port=36372
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Context=rpcdetailed
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatAvgTime=1.6
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36372:HeartbeatNumOps=17
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.port=52446
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Context=rpcdetailed
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheCleared=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheHit=0
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Context=rpc
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:CacheUpdated=6171
2018-07-21T05:49:56,951 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.port=53037
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Context=rpcdetailed
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatAvgTime=1.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort53037:HeartbeatNumOps=20
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:ReceivedBytes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcSlowCalls=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumDroppedConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcClientBackoff=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthorizationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:NumOpenConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.port=52446
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:SentBytes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:CallQueueLength=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Context=rpc
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcAuthenticationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort52446:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemMaxM=1820.5
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTerminated=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsWaiting=46
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.SessionId=null
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS MarkSweep=1174
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Context=jvm
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapCommittedM=271.39844
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapUsedM=259.46378
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemNonHeapMaxM=-1.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTotalExtraSleepTime=123
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCount=27
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsBlocked=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogWarn=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapCommittedM=1394.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsRunnable=151
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumWarnThresholdExceeded=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogInfo=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS Scavenge=22
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsNew=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillisPS Scavenge=797
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapUsedM=413.15613
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:ThreadsTimedWaiting=406
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcCountPS MarkSweep=5
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:MemHeapMaxM=1820.5
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogError=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:tag.ProcessName=NodeManager
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:LogFatal=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcTimeMillis=1971
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-5:GcNumInfoThresholdExceeded=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:EstimatedCapacityLostTotal=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumFailedVolumes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheCapacity=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:CacheUsed=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:FailedStorageLocations=[Ljava.lang.String;@18e05f8b
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksCached=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:LastVolumeFailureDate=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Capacity=84278861824
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data4]'}
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToUncache=1510
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:Remaining=39158743322
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:DfsUsed=101594397
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-36c7baef-5d8d-44ef-8960-e5e2e161c97a:NumBlocksFailedToCache=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:ReceivedBytes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcSlowCalls=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumDroppedConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcClientBackoff=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthorizationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:NumOpenConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.port=48537
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:SentBytes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:CallQueueLength=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Context=rpc
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcAuthenticationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort48537:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:ReceivedBytes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcSlowCalls=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumDroppedConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcClientBackoff=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthorizationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:NumOpenConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.port=38228
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:SentBytes=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:CallQueueLength=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Context=rpc
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcAuthenticationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:RpcQueueTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort38228:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:ReceivedBytes=6820
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeAvgTime=0.4
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcSlowCalls=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationSuccesses=3
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumDroppedConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.NumOpenConnectionsPerUser={}
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcClientBackoff=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeAvgTime=1.6
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthorizationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:NumOpenConnections=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.port=36372
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:SentBytes=3872
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcProcessingTimeNumOps=17
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationSuccesses=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:CallQueueLength=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Context=rpc
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcAuthenticationFailures=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:RpcQueueTimeNumOps=17
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36372:DeferredRpcProcessingTimeNumOps=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersKilled=2
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedContainers=1
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLogDirsDiskUtilizationPerc=53
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableVCores=7
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRolledBackOnFailure=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticVCores=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:GoodLocalDirsDiskUtilizationPerc=53
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedMemGB=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersReIniting=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersLaunched=4
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLocalDirs=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PrivateBytesDeleted=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Context=yarn
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationAvgTime=66.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:CacheSizeBeforeClean=94833103
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersFailed=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:TotalBytesDeleted=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedVCores=1
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:BadLogDirs=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersRunning=1
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:RunningOpportunisticContainers=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerLaunchDurationNumOps=4
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersCompleted=1
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedGB=1
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:PublicBytesDeleted=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerUsedVMemGB=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedVMemGB=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainerCpuUtilization=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:ContainersIniting=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AllocatedOpportunisticGB=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:AvailableGB=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeUsedMemGB=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics-1:NodeCpuUtilization=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateAvgTime=0.0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalDataFileIos=0
2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalFileIoErrors=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:DataFileIoRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:ReadIoRateAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FileIoErrorRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:WriteIoRateAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:FlushIoRateNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Context=dfs 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:TotalMetadataOperations=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:SyncIoRateAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data5:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:ReceivedBytes=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcSlowCalls=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationSuccesses=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumDroppedConnections=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcClientBackoff=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthorizationFailures=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:NumOpenConnections=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.port=36232 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:SentBytes=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcProcessingTimeNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationSuccesses=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:CallQueueLength=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Context=rpc 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcAuthenticationFailures=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:RpcQueueTimeNumOps=0 2018-07-21T05:49:56,952 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort36232:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RMNMInfo:LiveNodeManagers=[{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:45107","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:59491","LastHealthUpdate":1532177295524,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":1,"UsedMemoryMB":128,"AvailableMemoryMB":384},{"HostName":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal","Rack":"/default-rack","State":"RUNNING","NodeId":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44235","NodeHTTPAddress":"hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:44572","LastHealthUpdate":1532177294511,"HealthReport":"","NodeManagerVersion":"3.1.0","NumContainers":0,"UsedMemoryMB":0,"AvailableMemoryMB":512}] 
2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:ReceivedBytes=182319 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeAvgTime=0.07407407407407407 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcSlowCalls=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationSuccesses=9 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumDroppedConnections=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.NumOpenConnectionsPerUser={} 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcClientBackoff=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeAvgTime=1.037037037037037 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthorizationFailures=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:NumOpenConnections=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.port=59658 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:SentBytes=62175 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcProcessingTimeNumOps=95 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationSuccesses=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:CallQueueLength=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Context=rpc 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcAuthenticationFailures=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:RpcQueueTimeNumOps=95 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcActivityForPort59658:DeferredRpcProcessingTimeNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context=dfs 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.2=dfs 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.2=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Context.1=dfs 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname.1=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: NNTopUserOpCounts:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] 
DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalDataFileIos=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalFileIoErrors=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:DataFileIoRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:ReadIoRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FileIoErrorRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:WriteIoRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:FlushIoRateNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Context=dfs 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:TotalMetadataOperations=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: 
DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:SyncIoRateAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:EstimatedCapacityLostTotal=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumFailedVolumes=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheUsed=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:CacheCapacity=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksCached=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:LastVolumeFailureDate=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:Capacity=84278861824 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Context=FSDatasetState 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data1, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data2]'} 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:DfsUsed=100184064 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:Remaining=39382851584 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToCache=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-1:NumBlocksFailedToUnCache=1519 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputBytes=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsFailed=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Context=mapred 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleOutputsOK=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:ShuffleConnections=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics-1:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemMaxM=1820.5 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTerminated=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsWaiting=46 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.SessionId=null 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS MarkSweep=1174 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Context=jvm 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: 
JvmMetrics-3:MemNonHeapCommittedM=271.39844 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapUsedM=259.47433 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemNonHeapMaxM=-1.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTotalExtraSleepTime=833 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCount=27 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsBlocked=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogWarn=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapCommittedM=1394.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsRunnable=151 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumWarnThresholdExceeded=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogInfo=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS Scavenge=22 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsNew=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillisPS Scavenge=797 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapUsedM=414.57516 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:ThreadsTimedWaiting=406 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcCountPS MarkSweep=5 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:MemHeapMaxM=1820.5 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogError=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:tag.ProcessName=DataNode 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:LogFatal=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcTimeMillis=1971 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: JvmMetrics-3:GcNumInfoThresholdExceeded=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.port=35555 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Context=rpcdetailed 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort35555:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitVCores=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsCompleted=2 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersPreempted=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedContainers=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableVCores=15 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsSubmitted=3 
2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingMB=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedCapacity=0.125 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AvailableMB=896 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveApplications=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersReleased=11 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsKilled=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedMB=128 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Context=yarn 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateOffSwitchContainersAllocated=3 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedContainers=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateContainersAllocated=12 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingVCores=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateRackLocalContainersAllocated=9 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AllocatedVCores=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedMB=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ReservedVCores=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_1440=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateVcoreSecondsPreempted=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsFailed=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateNodeLocalContainersAllocated=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_60=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_0=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:ActiveUsers=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AbsoluteUsedCapacity=0.125 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:PendingContainers=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppAttemptFirstContainerAllocationDelayNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] 
DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceVCores=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsPending=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AggregateMemoryMBSecondsPreempted=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:tag.Queue=root.default 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:UsedAMResourceMB=128 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AMResourceLimitMB=128 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:running_300=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: QueueMetrics,q0=root,q1=default:AppsRunning=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWriteFallback=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksWrite=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersisted=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosNumOps=3625 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksDeletedInPendingIBR=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRemoved=1559 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalAvgTime=0.96 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CacheReportsNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksCached=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsAvgTime=11.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DataNodeActiveXceiversCount=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsTotalNumOps=600 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsNumOps=2081 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesLazyPersisted=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesRead=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksLazyPersistWindowMsNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpAvgTime=38576.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionTasks=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncCount=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpNumOps=190 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromLocalClient=2075 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:VolumeFailures=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesRead=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksReadHits=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:PacketAckRoundTripTimeNanosAvgTime=279951.76470588235 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalReadTime=230 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesRead=52624181 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:IncrementalBlockReportsAvgTime=0.9 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromRemoteClient=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictedWithoutRead=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksUncached=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:DatanodeNetworkErrors=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadsFromLocalClient=190 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksInPendingIBR=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockVerificationFailures=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpAvgTime=0.0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosNumOps=1170 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBytesWrite=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosNumOps=1170 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.SessionId=null 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockReportsNumOps=1 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] 
DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivedInPendingIBR=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionDecodingTimeMillis=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvictionWindowMsNumOps=0 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BytesWritten=59713967 2018-07-21T05:49:56,953 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionRemoteBytesRead=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcFailedReconstructionTasks=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksRead=190 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReplicated=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Context=dfs 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksVerified=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WritesFromRemoteClient=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksGetLocalPathInfo=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketTransferNanosAvgTime=11977.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksDeletedBeforeLazyPersisted=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlockChecksumOpNumOps=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsNumOps=600 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksWritten=2075 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:HeartbeatsAvgTime=0.93 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:TotalWriteTime=155 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RamDiskBlocksEvicted=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcDecodingTimeNanos=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionReadTimeMillis=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:LifelinesNumOps=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosNumOps=5063 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:WriteBlockOpNumOps=2075 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FlushNanosAvgTime=5942.685714285714 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReplaceBlockOpAvgTime=0.0 
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:RemoteBytesWritten=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:FsyncNanosAvgTime=0.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionBytesWritten=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:SendDataPacketBlockedOnNetworkNanosAvgTime=4597.583333333333 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:ReadBlockOpAvgTime=1.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:CopyBlockOpNumOps=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:EcReconstructionWriteTimeMillis=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-45625:BlocksReceivingInPendingIBR=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.port=48537 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Context=rpcdetailed 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort48537:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersKilled=5 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedContainers=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLogDirsDiskUtilizationPerc=53 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableVCores=8 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRolledBackOnFailure=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticVCores=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:GoodLocalDirsDiskUtilizationPerc=53 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedMemGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersReIniting=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersLaunched=6 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLocalDirs=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PrivateBytesDeleted=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Context=yarn 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationAvgTime=56.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:CacheSizeBeforeClean=94833103 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersFailed=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:TotalBytesDeleted=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] 
DataNodeMetricsLog: NodeManagerMetrics:AllocatedVCores=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:BadLogDirs=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersRunning=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:RunningOpportunisticContainers=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerLaunchDurationNumOps=6 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersCompleted=1 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:PublicBytesDeleted=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerUsedVMemGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedVMemGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainerCpuUtilization=0.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:ContainersIniting=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AllocatedOpportunisticGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:AvailableGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeUsedMemGB=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: NodeManagerMetrics:NodeCpuUtilization=0.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.port=36232 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Context=rpcdetailed 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RpcDetailedActivityForPort36232:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:EstimatedCapacityLostTotal=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumFailedVolumes=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheUsed=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:CacheCapacity=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksCached=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:LastVolumeFailureDate=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:Capacity=84278861824 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.Context=FSDatasetState 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:tag.StorageInfo=FSDataset{dirpath='[/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data7, /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data8]'} 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:DfsUsed=59065605 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:Remaining=39248584986 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: 
FSDatasetState-3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToCache=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: FSDatasetState-3:NumBlocksFailedToUnCache=1559 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheCleared=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheHit=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Context=rpc 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:CacheUpdated=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: RetryCache.NameNodeRetryCache:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessNumOps=1 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureNumOps=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailures=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsNumOps=13 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:RenewalFailuresTotal=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Context=ugi 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:GetGroupsAvgTime=11.666666666666666 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginSuccessAvgTime=2.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: UgiMetrics:LoginFailureAvgTime=0.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputBytes=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsFailed=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Context=mapred 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleOutputsOK=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:ShuffleConnections=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: ShuffleMetrics:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateAvgTime=0.0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalDataFileIos=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:MetadataOperationRateNumOps=0 2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalFileIoErrors=0 2018-07-21T05:49:56,954 INFO 
[pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:DataFileIoRateNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:ReadIoRateAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FileIoErrorRateNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:WriteIoRateAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:FlushIoRateNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Context=dfs
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:TotalMetadataOperations=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:SyncIoRateAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeVolume-/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/dfs/data/data3:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWriteFallback=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksWrite=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersisted=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosNumOps=3545
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksDeletedInPendingIBR=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRemoved=1496
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalAvgTime=0.875
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CacheReportsNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksCached=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsAvgTime=65.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DataNodeActiveXceiversCount=1
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsTotalNumOps=600
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsNumOps=2043
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesLazyPersisted=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesRead=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksLazyPersistWindowMsNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpAvgTime=55765.666666666664
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionTasks=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncCount=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpNumOps=187
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromLocalClient=2035
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:VolumeFailures=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesRead=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksReadHits=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:PacketAckRoundTripTimeNanosAvgTime=282546.6538461539
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalReadTime=22
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesRead=2912893
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:IncrementalBlockReportsAvgTime=0.2857142857142857
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromRemoteClient=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictedWithoutRead=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksUncached=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:DatanodeNetworkErrors=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadsFromLocalClient=187
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksInPendingIBR=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockVerificationFailures=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosNumOps=413
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBytesWrite=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosNumOps=413
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.SessionId=null
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockReportsNumOps=1
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivedInPendingIBR=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionDecodingTimeMillis=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvictionWindowMsNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BytesWritten=49383234
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionRemoteBytesRead=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcFailedReconstructionTasks=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksRead=187
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReplicated=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Context=dfs
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksVerified=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WritesFromRemoteClient=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksGetLocalPathInfo=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketTransferNanosAvgTime=7679.642857142857
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksDeletedBeforeLazyPersisted=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlockChecksumOpNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsNumOps=600
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:tag.Hostname=hive-ptest-slaves-a56.c.gcp-hive-upstream.internal
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksWritten=2035
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:HeartbeatsAvgTime=0.835
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:TotalWriteTime=121
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RamDiskBlocksEvicted=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcDecodingTimeNanos=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionReadTimeMillis=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:LifelinesNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosNumOps=4823
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:WriteBlockOpNumOps=2035
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FlushNanosAvgTime=5254.5
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReplaceBlockOpAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:RemoteBytesWritten=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:FsyncNanosAvgTime=0.0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionBytesWritten=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:SendDataPacketBlockedOnNetworkNanosAvgTime=5233.214285714285
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:ReadBlockOpAvgTime=1.4
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:CopyBlockOpNumOps=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:EcReconstructionWriteTimeMillis=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: DataNodeActivity-127.0.0.1-40780:BlocksReceivingInPendingIBR=0
2018-07-21T05:49:56,954 INFO [pool-46-thread-1] DataNodeMetricsLog: << End DataNode metrics dump
2018-07-21T05:50:04,702 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1341482251 (XID = 203), (SESSIONID = 37), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:04,719 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@288378736 (XID = 1418), (SESSIONID = 57), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:06,076 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1118060173 (XID = 193), (SESSIONID = 27), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:06,081 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1343046987 (XID = 1420), (SESSIONID = 59), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:06,191 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@2040223006 (XID = 197), (SESSIONID = 31), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:06,195 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1637349136 (XID = 1422), (SESSIONID = 61), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:07,122 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1206325076 (XID = 168), (SESSIONID = 3), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:07,127 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@807582212 (XID = 1424), (SESSIONID = 63), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:08,064 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@849388947 (XID = 189), (SESSIONID = 23), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:08,068 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1744440957 (XID = 1426), (SESSIONID = 65), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:09,989 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@356650429 (XID = 176), (SESSIONID = 11), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:09,994 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@931799566 (XID = 1428), (SESSIONID = 67), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:11,953 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1748549951 (XID = 205), (SESSIONID = 39), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:11,958 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@879552151 (XID = 1430), (SESSIONID = 69), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:12,484 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@751628028 (XID = 187), (SESSIONID = 21), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:12,489 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1991714869 (XID = 1432), (SESSIONID = 71), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:14,680 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:50:14,680 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.ExecutorHelper: afterExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:50:15,550 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:50:15,550 DEBUG [ResourceLocalizationService Cache Cleanup] concurrent.ExecutorHelper: afterExecute in thread: ResourceLocalizationService Cache Cleanup, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:50:16,498 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1821920913 (XID = 201), (SESSIONID = 35), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:16,503 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1613991618 (XID = 1434), (SESSIONID = 73), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:16,937 DEBUG [HikariPool-1 connection closer] pool.PoolBase: HikariPool-1 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1970139890 (XID = 172), (SESSIONID = 7), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:16,942 DEBUG [HikariPool-1 connection adder] pool.HikariPool: HikariPool-1 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@1200332469 (XID = 1436), (SESSIONID = 75), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:17,251 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@1939713960 (XID = 191), (SESSIONID = 25), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:17,256 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@2125445115 (XID = 1438), (SESSIONID = 77), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:17,874 DEBUG [HikariPool-2 connection closer] pool.PoolBase: HikariPool-2 - Closing connection org.apache.derby.impl.jdbc.EmbedConnection@402436084 (XID = 199), (SESSIONID = 33), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) : (connection has passed maxLifetime)
2018-07-21T05:50:17,891 DEBUG [HikariPool-2 connection adder] pool.HikariPool: HikariPool-2 - Added connection org.apache.derby.impl.jdbc.EmbedConnection@981373097 (XID = 1440), (SESSIONID = 79), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:50:19,248 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:50:19,310 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:50:49,248 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:50:49,310 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:50:49,732 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 9, retrying in 48,600ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:51:19,249 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:51:19,310 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:51:38,336 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.SQLMetadataConnector: Exception creating table
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:51:38,339 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: Failed
org.apache.hadoop.hive.ql.metadata.HiveException: org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:933) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 58 more
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 58 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 58 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
58 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getAllDataSourceNames(DruidStorageHandlerUtils.java:360) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:246) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) ~[?:?] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
58 more 2018-07-21T05:51:38,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,341 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReOptimizePlugin: ReOptimization: retryPossible: false 2018-07-21T05:51:38,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,341 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.OperatorStatsReaderHook: Reading runtime statistics for tez vertex task: Map 1 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.OperatorStatsReaderHook: Reading runtime statistics for tez vertex task: Reducer 2 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,342 ERROR [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,342 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : execution phase 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {} 2018-07-21T05:51:38,342 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed executing command(queryId=hiveptest_20180721054724_528c9b3c-7be7-417a-a334-a953dd9145ab); Time taken: 253.641 seconds 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:51:38,342 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Shutting down query CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: org.apache.hadoop.hive.ql.reexec.ReExecutionOverlayPlugin@3a4f37a8.shouldReExecute = false 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: 
org.apache.hadoop.hive.ql.reexec.ReOptimizePlugin@514df93c.shouldReExecute = false 2018-07-21T05:51:38,342 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-47-24_536_5880362027653075500-1 2018-07-21T05:51:38,349 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Deleting scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-47-24_536_5880362027653075500-1 2018-07-21T05:51:38,352 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:51:38,352 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:51:38,352 ERROR [main] QTestUtil: Client execution failed with error code = 1 running " CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`, cstring1, cstring2, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1, cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL" fname=druidmini_test1.q See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. 2018-07-21T05:51:38,352 INFO [main] control.CoreCliDriver: Done query druidmini_test1.q. succeeded=false, skipped=false. 
ElapsedTime(ms)=253819 2018-07-21T05:51:38,356 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d000d 2018-07-21T05:51:38,358 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d000d closed 2018-07-21T05:51:38,364 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@2f2de6e0 2018-07-21T05:51:38,365 DEBUG [main] session.SessionState: SessionState user: null 2018-07-21T05:51:38,367 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:38735 2018-07-21T05:51:38,367 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:38735 2018-07-21T05:51:38,368 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used 2018-07-21T05:51:38,369 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d000f with negotiated timeout 40000 for client /127.0.0.1:38735 2018-07-21T05:51:38,370 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:51:38,371 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:51:38,371 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:51:38,371 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:51:38,372 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,372 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:51:38,372 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 
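The root cause threaded through the "Caused by" chain above is a plain java.net.ConnectException raised inside the Derby client while DruidStorageHandler.preCreateTable was listing Druid data sources via JDBI (DruidStorageHandlerUtils.getAllDataSourceNames): nothing was accepting connections on the Druid metadata-storage port when the CREATE EXTERNAL TABLE ran, so the DDLTask failed with return code 1. A minimal reachability probe is sketched below; it assumes the target is localhost:1527 (the Derby client prints the port with a locale grouping separator as "1,527"), and DerbyProbe is a hypothetical diagnostic class, not part of the Hive test harness.

// DerbyProbe.java -- hedged diagnostic sketch, not from the test run above.
// Attempts a raw TCP connect to the host/port named in the error message;
// when nothing is listening it throws java.net.ConnectException: Connection
// refused, matching the innermost cause in the trace.
import java.net.InetSocketAddress;
import java.net.Socket;

public class DerbyProbe {
    public static void main(String[] args) throws Exception {
        try (Socket s = new Socket()) {
            // 1527 is assumed from the "port 1,527" message; 5000 ms timeout.
            s.connect(new InetSocketAddress("localhost", 1527), 5000);
            System.out.println("Derby network server is reachable");
        }
    }
}

If the probe connects, the metadata-storage process is up and the failure lies elsewhere (credentials, database name); here it would refuse, consistent with the Druid/Derby test fixture not listening during this 253-second run.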
2018-07-21T05:51:38,372 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:51:38,372 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:51:38,372 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,372 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:51:38,372 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs 2018-07-21T05:51:38,372 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file 2018-07-21T05:51:38,373 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx 2018-07-21T05:51:38,380 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources 2018-07-21T05:51:38,382 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:51:38,388 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:51:38,404 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db 2018-07-21T05:51:38,405 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false 2018-07-21T05:51:38,405 DEBUG [main] CliDriver: CliDriver inited with classpath 
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:51:38,408 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,409 INFO [main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:51:38,409 WARN [main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored 2018-07-21T05:51:38,410 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,410 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,410 DEBUG [main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,414 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,415 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,415 DEBUG [main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:51:38,416 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,416 INFO [main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:51:38,416 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,416 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive# 2018-07-21T05:51:38,416 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive# 2018-07-21T05:51:38,417 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,417 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,418 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,418 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.* 2018-07-21T05:51:38,418 INFO 
[main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.* 2018-07-21T05:51:38,419 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,419 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,419 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,419 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc 2018-07-21T05:51:38,419 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc 2018-07-21T05:51:38,428 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,429 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,429 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,429 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,430 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,439 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,440 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,440 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,440 INFO [main] metastore.HiveMetaStore: 0: get_databases: @hive# 2018-07-21T05:51:38,440 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_databases: @hive# 2018-07-21T05:51:38,441 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,442 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,442 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,442 INFO [main] metastore.HiveMetaStore: 0: get_tables: db=@hive#default pat=.* 2018-07-21T05:51:38,442 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_tables: db=@hive#default pat=.* 2018-07-21T05:51:38,442 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,443 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,443 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,443 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,443 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,451 DEBUG 
[main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,452 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,453 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,453 INFO [main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,453 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,462 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,463 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,463 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,463 INFO [main] metastore.HiveMetaStore: 0: drop_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,463 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=drop_table : tbl=hive.default.druid_partitioned_table_0 2018-07-21T05:51:38,465 DEBUG [main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE] 2018-07-21T05:51:38,493 DEBUG [main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:51:38,493 DEBUG [main] metastore.ReplChangeManager: Repl policy is not set for database 2018-07-21T05:51:38,494 DEBUG [main] utils.FileUtils: deleting hdfs://localhost:35925/build/ql/test/data/warehouse/druid_partitioned_table_0 2018-07-21T05:51:38,496 DEBUG [main] metrics.PerfLogger: 2018-07-21T05:51:38,496 INFO [main] druid.DruidStorageHandler: Dropping with purge all the data for data source default.druid_partitioned_table_0 2018-07-21T05:51:38,496 DEBUG [main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User 2018-07-21T05:51:38,497 INFO [main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage]. 2018-07-21T05:51:38,498 DEBUG [main] common.RetryUtils: Failed on try 1, retrying in 1,110ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 
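The delays that common.RetryUtils reports here and below (1,110 ms, then 2,123 ms, then 4,712 ms) roughly double on each attempt, i.e. exponential backoff with a little jitter. A minimal sketch of that pattern in plain Java, assuming a generic Callable-based helper (the class and method names below are illustrative, not Druid's actual RetryUtils API):

    import java.util.concurrent.Callable;
    import java.util.concurrent.ThreadLocalRandom;

    // Minimal retry-with-backoff sketch: retries a task, sleeping ~1s, ~2s, ~4s, ...
    public final class BackoffRetry {
        public static <T> T retry(Callable<T> task, int maxTries) throws Exception {
            final long baseMillis = 1000L;
            for (int attempt = 1; ; attempt++) {
                try {
                    return task.call();
                } catch (Exception e) {
                    if (attempt >= maxTries) {
                        throw e; // out of attempts: propagate the last failure
                    }
                    // Double the delay each round and add jitter, which is why the logged
                    // waits (1,110 ms / 2,123 ms / 4,712 ms) are not exact powers of two.
                    long sleepMillis = baseMillis * (1L << (attempt - 1))
                            + ThreadLocalRandom.current().nextLong(1000L);
                    Thread.sleep(sleepMillis);
                }
            }
        }
    }

Each failed attempt is logged with its full cause chain; the trace for the first attempt follows.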
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?] at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] 
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. 
at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more 2018-07-21T05:51:39,610 DEBUG [main] common.RetryUtils: Failed on try 2, retrying in 2,123ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at 
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?] at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] 
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] 
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ...
52 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more 2018-07-21T05:51:41,736 DEBUG [main] common.RetryUtils: Failed on try 3, retrying in 4,712ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
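The root ConnectException repeats unchanged across attempts: nothing is accepting connections on localhost:1527, the port the Derby client driver targets. The same failure can be reproduced outside the test harness with a plain JDBC probe; the URL below is copied from the "Supplying SQL Connector" log line above, and the class name is hypothetical:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    // Hypothetical probe: attempt the same Derby client connection the storage handler makes.
    // Requires derbyclient.jar on the classpath (the driver self-registers via JDBC 4).
    public final class DerbyProbe {
        public static void main(String[] args) {
            String url = "jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/"
                    + "apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db";
            try (Connection conn = DriverManager.getConnection(url)) {
                System.out.println("connected: " + conn.getMetaData().getURL());
            } catch (SQLException e) {
                // With no Derby Network Server listening on 1527 this prints the same
                // "Connection refused" cause chain seen in the traces above.
                e.printStackTrace();
            }
        }
    }

The trace for the third attempt follows.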
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.inTransaction(DBI.java:329) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$3.call(SQLMetadataConnector.java:158) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryTransaction(SQLMetadataConnector.java:162) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.getDataSegmentList(DruidStorageHandlerUtils.java:581) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.commitDropTable(DruidStorageHandler.java:756) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1380) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1332) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1313) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.dropTable(Unknown Source) [?:?] at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1029) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTablesCreatedDuringTests(QTestUtil.java:896) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.clearTestSideEffects(QTestUtil.java:977) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:124) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver$5.invokeInternal(CoreCliDriver.java:120) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper.invoke(ElapsedTimeLoggingWrapper.java:33) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.tearDown(CoreCliDriver.java:127) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:94) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] 
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. 
at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?] at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused. at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more Caused by: java.net.ConnectException: Connection refused at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102] at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102] at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102] at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102] at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102] at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102] at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 52 more
2018-07-21T05:51:46,451 WARN [main] common.RetryUtils: Failed on try 4, retrying in 10,102ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:51:49,249 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:51:49,311 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:51:56,573 WARN [main] common.RetryUtils: Failed on try 5, retrying in 17,983ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:52:14,560 WARN [main] common.RetryUtils: Failed on try 6, retrying in 28,854ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
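The retry delays recorded above, 4,712ms, 10,102ms, 17,983ms and 28,854ms (with 60,941ms on the next try), grow roughly geometrically, which is the signature of exponential backoff with random jitter. As a minimal sketch only, and not Druid's actual RetryUtils implementation, a loop of the following shape produces that delay pattern; the class name, base delay, cap and jitter range are assumptions for illustration:

import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;

// Hypothetical sketch of retry with exponential backoff and jitter, the
// pattern suggested by the RetryUtils warnings above. This is an
// illustration, not the Druid code that produced this log.
public final class BackoffRetrySketch {

    // Illustrative parameters, not values taken from the log.
    private static final long BASE_SLEEP_MS = 1_000;
    private static final long MAX_SLEEP_MS = 60_000;

    public static <T> T retry(Callable<T> task, int maxTries) throws Exception {
        for (int attempt = 1; ; attempt++) {
            try {
                return task.call();
            } catch (Exception e) {
                if (attempt >= maxTries) {
                    throw e; // retries exhausted; surface the last failure
                }
                long sleepMs = nextDelayMs(attempt);
                System.err.printf("Failed on try %d, retrying in %,dms.%n", attempt, sleepMs);
                Thread.sleep(sleepMs);
            }
        }
    }

    // Doubles the delay each attempt (base * 2^(attempt-1)), caps it at the
    // maximum, then fuzzes it by up to +/-25% so retries do not synchronize.
    private static long nextDelayMs(int attempt) {
        long capped = Math.min(MAX_SLEEP_MS, BASE_SLEEP_MS * (1L << Math.min(attempt - 1, 20)));
        double jitter = 0.75 + 0.5 * ThreadLocalRandom.current().nextDouble();
        return (long) (capped * jitter);
    }
}

With those assumed parameters, tries 3 through 6 would sleep about 4s, 8s, 16s and 32s before jitter, close to what the log reports; since the Derby server never comes up, every attempt fails identically and only the sleep time changes.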
2018-07-21T05:52:19,249 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:52:19,311 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:52:43,417 WARN [main] common.RetryUtils: Failed on try 7, retrying in 60,941ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
... 52 more
2018-07-21T05:52:49,250 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:52:49,311 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:53:07,211 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0003/recovery/1/summary is closed by DFSClient_NONMAPREDUCE_1237452931_1
2018-07-21T05:53:07,349 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d000e
2018-07-21T05:53:07,445 DEBUG [ContainersLauncher #3] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #3, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:53:09,581 DEBUG [ApplicationMasterLauncher #5] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:53:09,581 DEBUG [ApplicationMasterLauncher #5] security.LlapServerSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB
2018-07-21T05:53:09,582 INFO [Socket Reader #1 for port 45107] ipc.Server: Auth successful for appattempt_1532175606211_0003_000001 (auth:SIMPLE)
2018-07-21T05:53:19,250 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:53:19,311 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:53:44,361 WARN [main] common.RetryUtils: Failed on try 8, retrying in 72,413ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:53:49,250 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:53:49,312 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:54:19,250 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:54:19,312 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:54:49,251 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:54:49,312 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:54:56,777 WARN [main] common.RetryUtils: Failed on try 9, retrying in 63,688ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:55:19,251 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:55:19,313 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:55:49,251 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:55:49,313 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:56:00,468 INFO [main] control.CoreCliDriver: PerTestTearDown done. ElapsedTime(ms)=262116
2018-07-21T05:56:00,496 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d000f
2018-07-21T05:56:00,498 INFO [main] zookeeper.ZooKeeper: Session: 0x164bcc8430d000f closed
2018-07-21T05:56:00,499 INFO [main] zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:63672 sessionTimeout=1200000 watcher=org.apache.hadoop.hive.ql.QTestUtil$QTestSetup$1@13171215
2018-07-21T05:56:00,502 DEBUG [main] session.SessionState: SessionState user: null
2018-07-21T05:56:00,502 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:38778
2018-07-21T05:56:00,502 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:38778
2018-07-21T05:56:00,504 INFO [main] DependencyResolver: ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR,/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/conf/ivysettings.xml will be used
2018-07-21T05:56:00,504 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:56:00,505 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0010 with negotiated timeout 40000 for client /127.0.0.1:38778
2018-07-21T05:56:00,508 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:56:00,508 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:56:00,508 DEBUG [main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:56:00,508 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:56:00,508 INFO [main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:56:00,508 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:56:00,508 INFO [main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:56:00,508 INFO [main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:56:00,509 DEBUG [main] metrics.PerfLogger:
2018-07-21T05:56:00,509 DEBUG [main] session.SessionState: Removing resource dir /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:56:00,509 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme hdfs
2018-07-21T05:56:00,509 INFO [main] session.SessionState: Deleted directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be on fs with scheme file
2018-07-21T05:56:00,510 DEBUG [main] exec.Utilities: HDFS dir: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir with schema null, permission: rwx-wx-wx
2018-07-21T05:56:00,517 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/ee745c13-27f8-4940-a347-c8307a2da8be_resources
2018-07-21T05:56:00,518 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:56:00,525 INFO [main] session.SessionState: Created local directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/localscratchdir/ee745c13-27f8-4940-a347-c8307a2da8be
2018-07-21T05:56:00,526 INFO [main] session.SessionState: Created HDFS directory: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/_tmp_space.db
2018-07-21T05:56:00,527 WARN [main] session.SessionState: Tez session was already present in SessionState before start: sessionId=ee745c13-27f8-4940-a347-c8307a2da8be, queueName=null, user=hiveptest, doAs=true, isOpen=true, isDefault=false
2018-07-21T05:56:00,527 DEBUG [main] CliDriver: CliDriver inited with classpath
/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/test-classes:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/classes:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-common/4.0.0-SNAPSHOT/hive-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-classification/4.0.0-SNAPSHOT/hive-classification-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-shims/4.0.0-SNAPSHOT/hive-shims-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-common/4.0.0-SNAPSHOT/hive-shims-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-0.23/4.0.0-SNAPSHOT/hive-shims-0.23-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/shims/hive-shims-scheduler/4.0.0-SNAPSHOT/hive-shims-scheduler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-storage-api/2.7.0-SNAPSHOT/hive-storage-api-2.7.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-core/1.5.2/orc-core-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/orc/orc-shims/1.5.2/orc-shims-1.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/aircompressor/0.10/aircompressor-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/jline/jline/2.12/jline-2.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-http/9.3.20.v20170531/jetty-http-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-rewrite/9.3.20.v20170531/jetty-rewrite-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-client/9.3.20.v20170531/jetty-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-server/9.3.20.v20170531/jetty-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-io/9.3.20.v20170531/jetty-io-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlet/9.3.20.v20170531/jetty-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-webapp/9.3.20.v20170531/jetty-webapp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-xml/9.3.20.v20170531/jetty-xml-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/tdunning/json/1.8/json-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javolution/javolution/5.5.1/javolution-5.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-contrib/4.0.0-SNAPSHOT/hive-contrib-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-codec/commons-codec/1.7/commons-codec-1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/guava/guava/19.0/guava-19.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-common/4.0.0-SNAPSHOT/hive-standalone-metastore-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/transaction-api/1.1/transactio
n-api-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/sqlline/sqlline/1.3.0/sqlline-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-standalone-metastore-server/4.0.0-SNAPSHOT/hive-standalone-metastore-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-custom-serde/4.0.0-SNAPSHOT/hive-it-custom-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-unit/4.0.0-SNAPSHOT/hive-it-unit-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc/4.0.0-SNAPSHOT/hive-jdbc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-runner/9.3.20.v20170531/jetty-runner-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-plus/9.3.20.v20170531/jetty-plus-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-annotations/9.3.20.v20170531/jetty-annotations-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jaas/9.3.20.v20170531/jetty-jaas-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-server/9.3.20.v20170531/websocket-server-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-common/9.3.20.v20170531/websocket-common-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-api/9.3.20.v20170531/websocket-api-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-client/9.3.20.v20170531/websocket-client-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/websocket/websocket-servlet/9.3.20.v20170531/websocket-servlet-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-jndi/9.3.20.v20170531/jetty-jndi-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jsp/9.3.20.v20170531/apache-jsp-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/apache-jstl/9.3.20.v20170531/apache-jstl-9.3.20.v20170531.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-ext-client/4.0.0-SNAPSHOT/hive-llap-ext-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-core/4.0.0-SNAPSHOT/hive-hcatalog-core-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-streaming/4.0.0-SNAPSHOT/hive-hcatalog-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-streaming/4.0.0-SNAPSHOT/h
ive-streaming-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/4.0.0-SNAPSHOT/hive-hcatalog-server-extensions-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jms/jms/1.1/jms-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hcatalog/hive-webhcat-java-client/4.0.0-SNAPSHOT/hive-webhcat-java-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service/4.0.0-SNAPSHOT/hive-service-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-util/4.0.0-SNAPSHOT/hive-it-util-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/beust/jcommander/1.32/jcommander-1.32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-minicluster/3.1.0/hadoop-minicluster-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-accumulo-handler/4.0.0-SNAPSHOT/hive-accumulo-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-cli/4.0.0-SNAPSHOT/hive-cli-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-beeline/4.0.0-SNAPSHOT/hive-beeline-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/a
pache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-hbase-handler/4.0.0-SNAPSHOT/hive-hbase-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-metastore/4.0.0-SNAPSHOT/hive-metastore-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/transaction/jta/1.1/jta-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-serde/4.0.0-SNAPSHOT/hive-serde-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-service-rpc/4.0.0-SNAPSHOT/hive-service-rpc-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-buffer/4.1.17.Final/netty-buffer-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-common/4.1.17.Final/netty-common-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/avro/avro/1.8.2/avro-1.8.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.
jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/tukaani/xz/1.5/xz-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/parquet/parquet-hadoop-bundle/1.10.0/parquet-hadoop-bundle-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-vector-code-gen/4.0.0-SNAPSHOT/hive-vector-code-gen-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/oro/oro/2.0.8/oro-2.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-io/commons-io/2.4/commons-io-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-core/1.16.0/calcite-core-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-linq4j/1.16.0/calcite-linq4j-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/calcite-druid/1.16.0/calcite-druid-1.16.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica/1.11.0/avatica-1.11.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-auth/3.1.0/hadoop-auth-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar
:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/json-smart/2.3/json-smart-2.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-annotations/3.1.0/hadoop-annotations-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-net/commons-net/3.6/commons-net-3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/dnsjava/dnsjava/2.1.7/dnsjava-2.1.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.9.5/jackson-dataformat-smile-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-hdfs-storage/0.12.1/druid-hdfs-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/mysql-metadata-storage/0.12.1/mysql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/postgresql-metadata-storage/0.12.1/postgresql-metadata-storage-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-druid-handler/4.0.0-SNAPSHOT/hive-druid-handler-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-jdbc-handler/4.0.0-SNAPSHOT/hive-jdbc-handler-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/junit/junit/4.11/junit-4.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-archives/3.1.0/hadoop-archives-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/home/hiveptest/35.226.158.22
-hiveptest-0/maven/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/log4j/log4j/1.2.17/log4j-1.2.17.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/re2j/re2j/1.1/re2j-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-common/3.1.0/hadoop-common-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-distcp/3.1.0/hadoop-distcp-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar:/home/hiveptest/35.226.158.22-hiveptest
-0/maven/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs/3.1.0/hadoop-hdfs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-common/3.1.0/hadoop-mapreduce-client-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-hs/3.1.0/hadoop-mapreduce-client-hs-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-hdfs-client/3.1.0/hadoop-hdfs-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okhttp/okhttp/2.7.5/okhttp-2.7.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/squareup/okio/okio/1.6.0/okio-1.6.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-app/3.1.0/hadoop-mapreduce-client-app-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.1.0/hadoop-mapreduce-client-shuffle-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-core/3.1.0/hadoop-mapreduce-client-core-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-client/4.0.0-SNAPSHOT/hive-llap-client-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-common/4.0.0-SNAPSHOT/hive-llap-common-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-server/4.0.0-SNAPSHOT/hive-llap-server-4.0.0-SNAPSHOT-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-tests/3.1.0/hadoop-yarn-server-tests-3.1.0-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-common/3.1.0/hadoop-yarn-server-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.1.0/hadoop-yarn-server-nodemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.1.0/hadoop-yarn-server-resourcemanager-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.1.0/hadoop-yarn-server-applicationhistoryservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/home/hivepte
st/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.1.0/hadoop-yarn-server-timelineservice-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-client/3.1.0/hadoop-yarn-client-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-common/3.1.0/hadoop-yarn-common-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/guice/4.0/guice-4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/inject/javax.inject/1/javax.inject-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.9.5/jackson-module-jaxb-annotations-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.9.5/jackson-jaxrs-json-provider-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.9.5/jackson-jaxrs-base-2.9.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-api/3.1.0/hadoop-yarn-api-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/
maven/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-client/3.1.0/hadoop-client-3.1.0.jar:/home/hiveptest/35.226.158.22-h
iveptest-0/maven/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.1.0/hadoop-mapreduce-client-jobclient-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/glassfish/javax.el/3.0.1-b11-SNAPSHOT/javax.el-3.0.1-b11-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/usr/lib/jvm/java-8-openjdk-amd64/jre/../lib/tools.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.1.0/hadoop-yarn-server-web-proxy-3.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-it-druid/4.0.0-SNAPSHOT/hive-it-druid-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-server/0.12.1/druid-server-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-processing/0.12.1/druid-processing-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-hll/0.12.1/druid-hll-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extendedset/0.12.1/extendedset-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ning/compress-lzf/1.0.4/compress-lzf-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/skife/config/config-magic/0.9/config-magic-0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm/5.2/asm-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-commons/5.2/a
sm-commons-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-aws-common/0.12.1/druid-aws-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-ec2/1.10.77/aws-java-sdk-ec2-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/java-util/0.12.1/java-util-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-pro
vider-api-2.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/it/unimi/dsi/fastutil/8.1.0/fastutil-8.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/github/ben-manes/caffeine/caffeine/2.5.5/caffeine-2.5.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-services/0.12.1/druid-services-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-common/0.12.1/druid-common-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-api/0.12.1/druid-api-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-jul/2.5
/log4j-jul-2.5.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/javax/activation/activation/1.1.1/activation-1.1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/iharder/base64/2.3.8/base64-2.3.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-hadoop/0.12.1/druid-indexing-hadoop-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-indexing-service/0.12.1/druid-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/druid-sql/0.12.1/druid-sql-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-core/1.10.0/avatica-core-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-metrics/1.10.0/avatica-metrics-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/calcite/avatica/avatica-server/1.10.0/avatica-server-1.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/airlift/airline/0.7/airline-0.7.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/io/druid/extensions/druid-kafka-indexing-service/0.12.1/druid-kafka-indexing-service-0.12.1.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka-clients/0.10.2.0/kafka-clients-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/kafka/kafka_2.11/0.10.2.0/kafka_2.11-0.10.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/com/101tec/zkclient/0.10/zkclient-0.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-upgrade-acid/4.0.0-SNAPSHOT/hive-upgrade-ac
id-4.0.0-SNAPSHOT.jar:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/testconf:/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/../../conf: 2018-07-21T05:56:00,527 INFO [main] control.CoreCliDriver: PerTestSetup done. ElapsedTime(ms)=56 2018-07-21T05:56:00,527 INFO [main] control.CoreCliDriver: Begin query: druidmini_test_alter.q 2018-07-21T05:56:00,528 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:56:00,528 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:56:00,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true 2018-07-21T05:56:00,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:56:00,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:56:00,528 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:56:00,528 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:56:00,528 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: true 2018-07-21T05:56:00,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:56:00,528 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Resetting thread name to main 2018-07-21T05:56:00,528 INFO [main] conf.HiveConf: Using the default value passed in for log id: ee745c13-27f8-4940-a347-c8307a2da8be 2018-07-21T05:56:00,528 INFO [main] session.SessionState: Updating thread name to ee745c13-27f8-4940-a347-c8307a2da8be main 2018-07-21T05:56:00,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Acquired the compile lock. 
2018-07-21T05:56:00,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] conf.VariableSubstitution: Substitution is on: CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:56:00,530 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Compiling command(queryId=hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3): CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:56:00,530 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,531 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parsing command: CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1 FROM alltypesorc where ctimestamp2 IS NOT NULL 2018-07-21T05:56:00,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.ParseDriver: Parse Completed 2018-07-21T05:56:00,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,532 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,533 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:56:00,534 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:56:00,534 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . 
Setting it to value: ignored
2018-07-21T05:56:00,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,535 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,537 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:56:00,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,538 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:56:00,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,538 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Starting Semantic Analysis
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] sqlstd.SQLStdHiveAccessController: Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=ee745c13-27f8-4940-a347-c8307a2da8be, clientType=HIVECLI]
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStoreClient: Metastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook
2018-07-21T05:56:00,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@5461b718, needsRefresh = false, db.isCurrentUserOwner = true
2018-07-21T05:56:00,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:56:00,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
2018-07-21T05:56:00,539 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
2018-07-21T05:56:00,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,539 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore.
2018-07-21T05:56:00,541 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,541 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
2018-07-21T05:56:00,541 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:56:00,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,542 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,543 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:56:00,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,544 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:56:00,544 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl
2018-07-21T05:56:00,544 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_alltypesorc_n0 position=22
2018-07-21T05:56:00,545 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . Setting it to value: ignored
2018-07-21T05:56:00,545 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,546 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
2018-07-21T05:56:00,547 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,548 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0
2018-07-21T05:56:00,548 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,548 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_alltypesorc_n0
2018-07-21T05:56:00,548 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_alltypesorc_n0
2018-07-21T05:56:00,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,549 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,549 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,549 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed phase 1 of Semantic Analysis
2018-07-21T05:56:00,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:56:00,550 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,550 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,557 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:56:00,558 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:56:00,558 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,559 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,559 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,559 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,560 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: Created staging dir = hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1 for path = hdfs://localhost:35925/build/ql/test/data/warehouse
2018-07-21T05:56:00,560 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] common.FileUtils: Creating directory if it doesn't exist: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1
2018-07-21T05:56:00,561 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed getting MetaData in Semantic Analysis
2018-07-21T05:56:00,563 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,564 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,564 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,564 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_not_null_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,565 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,565 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,565 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,566 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,566 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,566 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_primary_keys : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,567 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,567 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_unique_constraints : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,567 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,568 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:56:00,568 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_foreign_keys : parentdb=null parenttbl=null foreigndb=default foreigntbl=alltypesorc
2018-07-21T05:56:00,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,568 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,570 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:56:00,571 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($7):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$8])
  HiveFilter(condition=[IS NOT NULL($7)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], ctimestamp2=[$9], cboolean1=[$10])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan before removing subquery:
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,572 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan just after removing subquery:
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Plan after decorrelation:
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,573 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,588 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] calcite.sql2rel: Plan after trimming unused fields
HiveProject(__time=[CAST($7):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$8])
  HiveFilter(condition=[IS NOT NULL($7)])
    HiveProject(ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cfloat=[$4], cdouble=[$5], cstring1=[$6], ctimestamp2=[$9], cboolean1=[$10])
      HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,589 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,590 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,591 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,603 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Original plan for PlanModifier
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after nested convertOpTree
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after propagating order
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Plan after fixTopOBSchema
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] translator.PlanModifierForASTConv: Final plan after modifier
HiveProject(__time=[CAST($9):TIMESTAMP_WITH_LOCAL_TIME_ZONE(15)], cstring1=[$6], cdouble=[$5], cfloat=[$4], ctinyint=[$0], csmallint=[$1], cint=[$2], cbigint=[$3], cboolean1=[$10])
  HiveFilter(condition=[IS NOT NULL($9)])
    HiveTableScan(table=[[default, alltypesorc]], table:alias=[alltypesorc])
2018-07-21T05:56:00,604 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Creating table default.druid_alltypesorc_n0 position=22
2018-07-21T05:56:00,604 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.druid_alltypesorc_n0
2018-07-21T05:56:00,605 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.druid_alltypesorc_n0
2018-07-21T05:56:00,605 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,606 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,606 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,606 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,607 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for source tables
2018-07-21T05:56:00,607 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,607 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,607 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table : tbl=hive.default.alltypesorc
2018-07-21T05:56:00,612 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for subqueries
2018-07-21T05:56:00,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Get metadata for destination tables
2018-07-21T05:56:00,613 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,613 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,614 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Table Plan for alltypesorc TS[0]
2018-07-21T05:56:00,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Filter Plan for null row schema: alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:56:00,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: RR before GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} after GB alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)}
2018-07-21T05:56:00,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: tree: (tok_select (tok_selexpr (tok_function tok_timestamplocaltz (. (tok_table_or_col alltypesorc) ctimestamp2)) __time) (tok_selexpr (. (tok_table_or_col alltypesorc) cstring1) cstring1) (tok_selexpr (. (tok_table_or_col alltypesorc) cdouble) cdouble) (tok_selexpr (. (tok_table_or_col alltypesorc) cfloat) cfloat) (tok_selexpr (. (tok_table_or_col alltypesorc) ctinyint) ctinyint) (tok_selexpr (. (tok_table_or_col alltypesorc) csmallint) csmallint) (tok_selexpr (. (tok_table_or_col alltypesorc) cint) cint) (tok_selexpr (. (tok_table_or_col alltypesorc) cbigint) cbigint) (tok_selexpr (. (tok_table_or_col alltypesorc) cboolean1) cboolean1))
2018-07-21T05:56:00,615 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: genSelectPlan: input = alltypesorc{(ctinyint,ctinyint: tinyint)(csmallint,csmallint: smallint)(cint,cint: int)(cbigint,cbigint: bigint)(cfloat,cfloat: float)(cdouble,cdouble: double)(cstring1,cstring1: string)(cstring2,cstring2: string)(ctimestamp1,ctimestamp1: timestamp)(ctimestamp2,ctimestamp2: timestamp)(cboolean1,cboolean1: boolean)(cboolean2,cboolean2: boolean)(block__offset__inside__file,BLOCK__OFFSET__INSIDE__FILE: bigint)(input__file__name,INPUT__FILE__NAME: string)(row__id,ROW__ID: struct)} starRr = null
2018-07-21T05:56:00,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cdouble,_col2: double)(cfloat,_col3: float)(ctinyint,_col4: tinyint)(csmallint,_col5: smallint)(cint,_col6: int)(cbigint,_col7: bigint)(cboolean1,_col8: boolean)}
2018-07-21T05:56:00,616 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Select Plan for clause: insclause-0
2018-07-21T05:56:00,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1] types: [timestamp with local time zone('US/Pacific'), string, double, float, tinyint, smallint, int, bigint, boolean]
2018-07-21T05:56:00,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1] types: [timestamp with local time zone('US/Pacific'), string, double, float, tinyint, smallint, int, bigint, boolean]
2018-07-21T05:56:00,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Set stats collection dir : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10003
2018-07-21T05:56:00,617 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,617 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,617 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,618 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0 dest_path: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10001 row schema: null{(__time,_col0: timestamp with local time zone)(cstring1,_col1: string)(cdouble,_col2: double)(cfloat,_col3: float)(ctinyint,_col4: tinyint)(csmallint,_col5: smallint)(cint,_col6: int)(cbigint,_col7: bigint)(cboolean1,_col8: boolean)}
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Body Plan for Query Block null
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Created Plan for Query Block null
2018-07-21T05:56:00,619 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: CBO Succeeded; optimized logical plan.
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Before logical optimization TS[0]-FIL[1]-SEL[2]-FS[3]
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lineage.Generator: Time taken for lineage transform=0
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.PartitionColumnsSeparator: Partition columns not separated for null, is not IN operator :
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,619 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FS(3)
2018-07-21T05:56:00,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for SEL(2)
2018-07-21T05:56:00,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for FIL(1)
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of FIL for alias alltypesorc: ctimestamp2 is not null
2018-07-21T05:56:00,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Processing for TS(0)
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.OpProcFactory: Pushdown predicates of TS for alias alltypesorc: ctimestamp2 is not null
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] ppd.SimplePredicatePushDown: After PPD: TS[0]-FIL[4]-SEL[2]-FS[3]
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Sorted dynamic partitioning on time granularity optimization kicked in...
2018-07-21T05:56:00,620 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SortedDynPartitionTimeGranularityOptimizer: Inserted SEL_5, RS_6 and SEL_7 as parent of FS_3 and child of SEL_2
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,620 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 6 key:[Column[__time_granularity]]
2018-07-21T05:56:00,621 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 oldColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8]}
2018-07-21T05:56:00,621 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ColumnPrunerProcFactory: RS 6 newColExprMap: {VALUE._col2=Column[_col2], VALUE._col3=Column[_col3], VALUE._col4=Column[_col4], VALUE._col5=Column[_col5], VALUE._col0=Column[_col0], VALUE._col1=Column[_col1], KEY.__time_granularity=Column[__time_granularity], VALUE._col6=Column[_col6], VALUE._col7=Column[_col7], VALUE._col8=Column[_col8]}
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.BucketingSortingReduceSinkOptimizer$BucketSortReduceSinkProcessor: Reduce Sink is added by Sorted Dynamic Partition Optimizer.
Bailing out of Bucketing Sorting Reduce Sink Optimizer
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: After logical optimization TS[0]-FIL[4]-SEL[2]-SEL[5]-RS[6]-SEL[7]-FS[3]
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Parent: TS[0]
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: Filter: ctimestamp2 is not null (type: boolean)
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.DynamicPartitionPruningOptimization: TableScan: TS[0]
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,621 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1)
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: double|{null}cdouble,_col3: float|{null}cfloat,_col4: tinyint|{null}ctinyint,_col5: smallint|{null}csmallint,_col6: int|{null}cint,_col7: bigint|{null}cbigint,_col8: boolean|{null}cboolean1)
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp2]) Column[cstring1] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1])
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: double|{null}cdouble,_col3: float|{null}cfloat,_col4: tinyint|{null}ctinyint,_col5: smallint|{null}csmallint,_col6: int|{null}cint,_col7: bigint|{null}cbigint,_col8: boolean|{null}cboolean1,__time_granularity: timestamp|{null})
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: double|{null}cdouble,_col3: float|{null}cfloat,_col4: tinyint|{null}ctinyint,_col5: smallint|{null}csmallint,_col6: int|{null}cint,_col7: bigint|{null}cbigint,_col8: boolean|{null}cboolean1,__time_granularity: timestamp|{null})
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[KEY.__time_granularity])
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] stats.BasicStats: Estimated average row size: 504
2018-07-21T05:56:00,622 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,622 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:56:00,622 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_table_statistics_req: table=hive.default.alltypesorc
2018-07-21T05:56:00,631 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Direct SQL query in 4.043076ms + 0.00818ms, the query is [select "COLUMN_NAME", "COLUMN_TYPE", "LONG_LOW_VALUE", "LONG_HIGH_VALUE", "DOUBLE_LOW_VALUE", "DOUBLE_HIGH_VALUE", "BIG_DECIMAL_LOW_VALUE", "BIG_DECIMAL_HIGH_VALUE", "NUM_NULLS", "NUM_DISTINCTS", "BIT_VECTOR", "AVG_COL_LEN", "MAX_COL_LEN", "NUM_TRUES", "NUM_FALSES", "LAST_ANALYZED" from "TAB_COL_STATS" where "CAT_NAME" = ? and "DB_NAME" = ? and "TABLE_NAME" = ? and "COLUMN_NAME" in (...)]
2018-07-21T05:56:00,633 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting stats (Num rows: 12288 Data size: 1684250 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-TS[0] (alltypesorc): numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FIL[4]: numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {cint= colName: cint colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, ctimestamp2= colName: ctimestamp2 colType: timestamp countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, csmallint= colName: csmallint colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, cdouble= colName: cdouble colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, cboolean1= colName: cboolean1 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, ctinyint= colName: ctinyint colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, cfloat= colName: cfloat colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, cstring1= colName: cstring1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, cbigint= colName: cbigint colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting stats (Num rows: 12288 Data size: 1684250 Basic stats: COMPLETE Column stats: COMPLETE) on FIL[4]
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 1684250 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[2]
2018-07-21T05:56:00,634 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[2]: numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:56:00,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 1684250 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[5]
2018-07-21T05:56:00,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[5]: numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col0= colName: _col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col2= colName: _col2 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, _col1= colName: _col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, _col4= colName: _col4 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, _col3= colName: _col3 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, _col6= colName: _col6 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, _col5= colName: _col5 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, _col8= colName: _col8 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false, _col7= colName: _col7 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false}
2018-07-21T05:56:00,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting stats (Num rows: 12288 Data size: 1684250 Basic stats: COMPLETE Column stats: COMPLETE) on RS[6]
2018-07-21T05:56:00,635 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-RS[6]: numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting stats (Num rows: 12288 Data size: 1684250 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[7]
2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-SEL[7]: numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: double countDistincts: 5527 numNulls: 3114 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: float countDistincts: 131 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: tinyint countDistincts: 130 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false, VALUE._col5= colName: VALUE._col5 colType: smallint countDistincts: 5666 numNulls: 3114 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 3114 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: int countDistincts: 6104 numNulls: 3115 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: bigint countDistincts: 5917 numNulls: 3115 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: boolean countDistincts: 2 numNulls: 3114 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false}
2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] annotation.StatsRulesProcFactory: [0] STATS-FS[3]: numRows: 12288 dataSize: 1684250 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col2= colName: VALUE._col2 colType: double countDistincts: 5527 numNulls: 6228 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -16379.0 max: 9763215.5639 ] isPrimaryKey: false isEstimated: false, VALUE._col3= colName: VALUE._col3 colType: float countDistincts: 131 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64.0 max: 79.5530014038086 ] isPrimaryKey: false isEstimated: false, VALUE._col4= colName: VALUE._col4 colType: tinyint countDistincts: 130 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -64 max: 62 ] isPrimaryKey: false isEstimated: false,
VALUE._col5= colName: VALUE._col5 colType: smallint countDistincts: 5666 numNulls: 6228 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -16379 max: 16376 ] isPrimaryKey: false isEstimated: false, VALUE._col0= colName: VALUE._col0 colType: timestamp with local time zone countDistincts: 0 numNulls: 0 avgColLen: 40.0 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col1= colName: VALUE._col1 colType: string countDistincts: 5979 numNulls: 6228 avgColLen: 10.406982421875 numTrues: 0 numFalses: 0 isPrimaryKey: false isEstimated: false, VALUE._col6= colName: VALUE._col6 colType: int countDistincts: 6104 numNulls: 6230 avgColLen: 4.0 numTrues: 0 numFalses: 0 Range: [ min: -1073279343 max: 1073680599 ] isPrimaryKey: false isEstimated: false, VALUE._col7= colName: VALUE._col7 colType: bigint countDistincts: 5917 numNulls: 6230 avgColLen: 8.0 numTrues: 0 numFalses: 0 Range: [ min: -2147311592 max: 2145498388 ] isPrimaryKey: false isEstimated: false, VALUE._col8= colName: VALUE._col8 colType: boolean countDistincts: 2 numNulls: 6228 avgColLen: 4.0 numTrues: 6138 numFalses: 3036 isPrimaryKey: false isEstimated: false} 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.TableScanOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on TS[0] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FilterOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on FIL[4] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: []; sort column names: []; bucket count: -1; bucketing version: 2 }) on SEL[2] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[5] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.ReduceSinkOperator: Setting traits ({ bucket column names: [[]]; sort column names: [[]]; bucket count: -1; bucketing version: 2 }) on RS[6] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SelectOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on SEL[7] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.FileSinkOperator: Setting traits ({ bucket column names: null; sort column names: null; bucket count: -1; bucketing version: 2 }) on FS[3] 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.SetReducerParallelism: Set parallelism for reduce sink RS[6] to: 1 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:00,636 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 
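The STATS-SEL[5] entry above carries both basic stats (numRows, dataSize) and per-column stats (avgColLen, numNulls, NDV, ranges) through every operator in the plan. A minimal sketch of how a dataSize figure like 1684250 can be approximated from numRows and the per-column avgColLen/numNulls values; this is a simplification for illustration, not Hive's exact StatsUtils arithmetic, and the ColStat record is a hypothetical stand-in:

    import java.util.List;

    public class StatsSketch {
        // Hypothetical, simplified stand-in for one column-statistics entry.
        record ColStat(String name, double avgColLen, long numNulls) {}

        // Estimated bytes ~= sum over columns of (non-null rows) * avgColLen.
        static long estimateDataSize(long numRows, List<ColStat> cols) {
            long total = 0;
            for (ColStat c : cols) {
                long nonNull = Math.max(0, numRows - c.numNulls());
                total += (long) (nonNull * c.avgColLen());
            }
            return total;
        }

        public static void main(String[] args) {
            // Two of the columns from the STATS-SEL[5] entry above.
            List<ColStat> cols = List.of(
                    new ColStat("_col0", 40.0, 0),     // timestamp with local time zone
                    new ColStat("_col2", 8.0, 3114));  // double
            System.out.println(estimateDataSize(12288, cols)); // prints 564912
        }
    }

Summing the same expression over all nine columns of the operator's schema would give an estimate comparable to the logged dataSize.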
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FS, 3
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 7
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: RS, 6
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 5
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: SEL, 2
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: FIL, 4
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Component:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Operator: TS, 0
2018-07-21T05:56:00,637 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Cycle free: true
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,cstring2: string|{alltypesorc}cstring2,ctimestamp1: timestamp|{alltypesorc}ctimestamp1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1,cboolean2: boolean|{alltypesorc}cboolean2,BLOCK__OFFSET__INSIDE__FILE: bigint|{alltypesorc}block__offset__inside__file,INPUT__FILE__NAME: string|{alltypesorc}input__file__name,ROW__ID: struct|{alltypesorc}row__id)
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FIL[4] with rs:(ctinyint: tinyint|{alltypesorc}ctinyint,csmallint: smallint|{alltypesorc}csmallint,cint: int|{alltypesorc}cint,cbigint: bigint|{alltypesorc}cbigint,cfloat: float|{alltypesorc}cfloat,cdouble: double|{alltypesorc}cdouble,cstring1: string|{alltypesorc}cstring1,ctimestamp2: timestamp|{alltypesorc}ctimestamp2,cboolean1: boolean|{alltypesorc}cboolean1)
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op TS_0 {}
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FIL[4]
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: Old filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New filter FIL[4] conditions:ctimestamp2 is not null
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[2] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: double|{null}cdouble,_col3: float|{null}cfloat,_col4: tinyint|{null}ctinyint,_col5: smallint|{null}csmallint,_col6: int|{null}cint,_col7: bigint|{null}cbigint,_col8: boolean|{null}cboolean1)
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op FIL_4 {}
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[2]
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(GenericUDFToTimestampLocalTZ(Column[ctimestamp2]) Column[cstring1] Column[cdouble] Column[cfloat] Column[ctinyint] Column[csmallint] Column[cint] Column[cbigint] Column[cboolean1])
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: double|{null}cdouble,_col3: float|{null}cfloat,_col4: tinyint|{null}ctinyint,_col5: smallint|{null}csmallint,_col6: int|{null}cint,_col7: bigint|{null}cbigint,_col8: boolean|{null}cboolean1,__time_granularity: timestamp|{null})
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_2 {}
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[5]
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0] Column[_col1] Column[_col2] Column[_col3] Column[_col4] Column[_col5] Column[_col6] Column[_col7] Column[_col8] GenericUDFBridge ==> floor_hour (GenericUDFTimestamp(GenericUDFEpochMilli(Column[_col0]))))
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[6] with rs:(VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp with local time zone|{null}__time,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: string|{null}cstring1,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: double|{null}cdouble,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: float|{null}cfloat,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: tinyint|{null}ctinyint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: smallint|{null}csmallint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: int|{null}cint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: bigint|{null}cbigint,VALUE.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: boolean|{null}cboolean1,KEY.org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: timestamp|{null})
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_5 {}
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator RS[6]
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[7] with rs:(_col0: timestamp with local time zone|{null}__time,_col1: string|{null}cstring1,_col2: double|{null}cdouble,_col3: float|{null}cfloat,_col4: tinyint|{null}ctinyint,_col5: smallint|{null}csmallint,_col6: int|{null}cint,_col7: bigint|{null}cbigint,_col8: boolean|{null}cboolean1,__time_granularity: timestamp|{null})
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op RS_6 {}
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator SEL[7]
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcFactory: New column list:(Column[VALUE._col0] Column[VALUE._col1] Column[VALUE._col2] Column[VALUE._col3] Column[VALUE._col4] Column[VALUE._col5] Column[VALUE._col6] Column[VALUE._col7] Column[VALUE._col8] Column[KEY.__time_granularity])
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[3] with rs:(__time: timestamp with local time zone|{},cstring1: string|{},cdouble: double|{},cfloat: float|{},ctinyint: tinyint|{},csmallint: smallint|{},cint: int|{},cbigint: bigint|{},cboolean1: boolean|{},__time_granularity: timestamp|{null})
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Constant of Op SEL_7 {}
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.ConstantPropagateProcCtx: Offering constants [] to operator FS[3]
2018-07-21T05:56:00,637 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,637 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,637 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,638 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,639 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,639 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: get_database: @hive#default
2018-07-21T05:56:00,639 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=get_database: @hive#default
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: getDatabase: directsql returning db default locn[hdfs://localhost:35925/build/ql/test/data/warehouse] desc [Default Hive database] owner [public] ownertype [ROLE]
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null)
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger:
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: TS[0]
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: RS[6]
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding map work (Map 1) for TS[0]
2018-07-21T05:56:00,640 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Adding hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc of table alltypesorc
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] optimizer.GenMapRedUtils: Information added for path hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: RS[6]
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Root operator: SEL[7]
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Leaf operator: FS[3]
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Adding reduce work (Reducer 2) for SEL[7]
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezUtils: Setting up reduce sink: RS[6] with following reduce work: Reducer 2
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: Removing RS[6] as parent from SEL[7]
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.GenTezWork: First pass. Leaf operator: FS[3]
2018-07-21T05:56:00,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_alltypesorc_n0
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: There are 0 app master events.
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking at: Map 1
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.NullScanTaskDispatcher: Found 0 null table scans
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping metadata only query optimization
2018-07-21T05:56:00,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapPreVectorizationPass: LLAP disabled.
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of DependencyCollectionTask
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of StatsTask
2018-07-21T05:56:00,641 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Ignoring vectorization of MoveTask
2018-07-21T05:56:00,641 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Examining input format to see if vectorization is enabled.
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Vectorization is enabled for input format(s) [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Validating and vectorizing MapWork... (vectorizedVertexNum 0)
2018-07-21T05:56:00,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] vector.VectorizationContext: Input Expression = GenericUDFOPNotNull(Column[ctimestamp2]), Vectorized Expression = SelectColumnIsNotNull(col 9:timestamp)
2018-07-21T05:56:00,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: vectorizeOperator org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator
2018-07-21T05:56:00,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: vectorizeOperator org.apache.hadoop.hive.ql.plan.FilterDesc
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorization enabled: true
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorized: false
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map notVectorizedReason: Select expression for SELECT operator: Vectorizing data type timestamp with local time zone not supported
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map vectorizedVertexNum: 0
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map enabledConditionsMet: [hive.vectorized.use.vectorized.input.format IS true]
2018-07-21T05:56:00,642 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Map inputFileFormatClassNameSet: [org.apache.hadoop.hive.ql.io.orc.OrcInputFormat]
2018-07-21T05:56:00,642 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Using reduce tag 0
2018-07-21T05:56:00,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] lazybinary.LazyBinarySerDe: LazyBinarySerDe initialized with: columnNames=[_col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8] columnTypes=[timestamp with local time zone('US/Pacific'), string, double, float, tinyint, smallint, int, bigint, boolean]
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Validating and vectorizing ReduceWork... (vectorizedVertexNum 1)
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorization enabled: true
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorized: false
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce notVectorizedReason: Select expression for SELECT operator: Vectorizing data type timestamp with local time zone not supported
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reduce vectorizedVertexNum: 1
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reducer hive.vectorized.execution.reduce.enabled: true
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.Vectorizer: Reducer engine: tez
2018-07-21T05:56:00,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.TezCompiler: Skipping stage id rearranger
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: llap mode: none
2018-07-21T05:56:00,643 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.LlapDecider: LLAP disabled.
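The Vectorizer entries above separate two questions: whether vectorization is enabled at all (the input format qualifies, hence "Map vectorization enabled: true") and whether the vertex actually validates ("Map vectorized: false" with a notVectorizedReason), failing here on the timestamp with local time zone column. A minimal sketch of that two-step decision, assuming a simplified supported-type set and hypothetical names rather than Hive's real Vectorizer API:

    import java.util.List;
    import java.util.Optional;
    import java.util.Set;

    public class VectorizationCheck {
        // Illustrative subset only; the real support matrix lives in Hive's Vectorizer.
        static final Set<String> SUPPORTED = Set.of(
                "boolean", "tinyint", "smallint", "int", "bigint",
                "float", "double", "string", "timestamp");

        // Step 2 of the decision: empty means every column type validates,
        // otherwise the value is the notVectorizedReason.
        static Optional<String> validate(List<String> columnTypes) {
            for (String t : columnTypes) {
                if (!SUPPORTED.contains(t)) {
                    return Optional.of("Vectorizing data type " + t + " not supported");
                }
            }
            return Optional.empty();
        }

        public static void main(String[] args) {
            // The reduce-side value schema from the LazyBinarySerDe entry above (simplified).
            List<String> types = List.of("timestamp with local time zone", "string",
                    "double", "float", "tinyint", "smallint", "int", "bigint", "boolean");
            validate(types).ifPresentOrElse(
                    reason -> System.out.println("vectorized: false, reason: " + reason),
                    () -> System.out.println("vectorized: true"));
        }
    }

With this schema the check fails on the first column, which matches the logged outcome: the vertex runs non-vectorized even though vectorization was enabled for the ORC input format.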
2018-07-21T05:56:00,643 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp2 is not null
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] physical.SerializeFilter: Serializing: ctimestamp2 is not null
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: Completed plan generation
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Semantic Analysis Completed (retrial = false)
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: validation start
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:__time, type:timestamp with local time zone, comment:null), FieldSchema(name:cstring1, type:string, comment:null), FieldSchema(name:cdouble, type:double, comment:null), FieldSchema(name:cfloat, type:float, comment:null), FieldSchema(name:ctinyint, type:tinyint, comment:null), FieldSchema(name:csmallint, type:smallint, comment:null), FieldSchema(name:cint, type:int, comment:null), FieldSchema(name:cbigint, type:bigint, comment:null), FieldSchema(name:cboolean1, type:boolean, comment:null)], properties:null)
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Dumping metastore api call timing information for : compilation phase
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Total time spent in each metastore function (ms): {getNotNullConstraints_(NotNullConstraintsRequest, )=2, getUniqueConstraints_(UniqueConstraintsRequest, )=0, getPrimaryKeys_(PrimaryKeysRequest, )=2, getTableColumnStatistics_(String, String, List, )=12, getForeignKeys_(ForeignKeysRequest, )=1}
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Completed compiling command(queryId=hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3); Time taken: 0.114 seconds
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] reexec.ReExecDriver: Execution #1 of query
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:56:00,644 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,644 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Executing command(queryId=hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3): CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:56:00,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1 FROM alltypesorc where ctimestamp2 IS NOT NULL
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: type: CREATETABLE_AS_SELECT
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Input: default@alltypesorc
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: database:default
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: PREHOOK: Output: default@druid_alltypesorc_n0
2018-07-21T05:56:00,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Query ID = hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Total jobs = 1
2018-07-21T05:56:00,645 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Launching Job 1 out of 1
2018-07-21T05:56:00,645 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-1:MAPRED] in serial mode
2018-07-21T05:56:00,658 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] security.ShellBasedUnixGroupsMapping: unable to return groups for user hive_test_user
org.apache.hadoop.security.ShellBasedUnixGroupsMapping$PartialGroupNameException: The user name 'hive_test_user' is not found.
id: hive_test_user: no such user
id: hive_test_user: no such user
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.resolvePartialGroupNames(ShellBasedUnixGroupsMapping.java:294) ~[hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getUnixGroups(ShellBasedUnixGroupsMapping.java:207) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.ShellBasedUnixGroupsMapping.getGroups(ShellBasedUnixGroupsMapping.java:97) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback.getGroups(JniBasedUnixGroupsMappingWithFallback.java:51) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.fetchGroupList(Groups.java:384) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:319) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.Groups$GroupCacheLoader.load(Groups.java:269) [hadoop-common-3.1.0.jar:?]
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3542) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2323) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2286) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2201) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.get(LocalCache.java:3953) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3957) [guava-19.0.jar:?]
    at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4875) [guava-19.0.jar:?]
    at org.apache.hadoop.security.Groups.getGroups(Groups.java:227) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.security.UserGroupInformation.getGroups(UserGroupInformation.java:1540) [hadoop-common-3.1.0.jar:?]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:168) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:56:00,710 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-56-00_530_1174748063339984724-1
2018-07-21T05:56:00,710 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: TezDir path set hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-56-00_530_1174748063339984724-1/hiveptest/_tez_scratch_dir for user: hiveptest
2018-07-21T05:56:00,710 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,710 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.WorkloadManagerFederation: Using unmanaged session - WM is not initialized
2018-07-21T05:56:00,710 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: The current user: hiveptest, session user: hiveptest
2018-07-21T05:56:00,710 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionPoolManager: Current queue name is null incoming queue name is null
2018-07-21T05:56:00,710 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,710 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Subscribed to counters: [] for queryId: hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3
2018-07-21T05:56:00,710 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Session is already open
2018-07-21T05:56:00,711 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Adding local resource: scheme: "hdfs" host: "localhost" port: 35925 file: "/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources/hive-druid-handler-4.0.0-SNAPSHOT.jar"
2018-07-21T05:56:00,711 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,711 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Dag name: CREATE EXTERNAL TABLE druid_alltypeso...NULL (Stage-1)
2018-07-21T05:56:00,711 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: DagInfo: {"context":"Hive","description":"\nCREATE EXTERNAL TABLE druid_alltypesorc_n0\nSTORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'\nTBLPROPERTIES (\"druid.segment.granularity\" = \"HOUR\", \"druid.query.granularity\" = \"MINUTE\")\nAS\n SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`,\ncstring1,\ncdouble,\ncfloat,\nctinyint,\ncsmallint,\ncint,\ncbigint,\ncboolean1\nFROM alltypesorc where ctimestamp2 IS NOT NULL"}
2018-07-21T05:56:00,711 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezTask: Setting Tez DAG access for queryId=hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3 with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest
2018-07-21T05:56:00,711 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,713 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,713 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing ReduceWork using kryo
2018-07-21T05:56:00,714 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,714 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Reducer 2 size: 2.81KB
2018-07-21T05:56:00,720 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: Initing FSStatsPublisher with : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10003
2018-07-21T05:56:00,721 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] fs.FSStatsPublisher: created : hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10003
2018-07-21T05:56:00,726 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10001
2018-07-21T05:56:00,726 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking ReduceWork output URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10002
2018-07-21T05:56:00,726 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,726 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,727 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Context: New scratch dir is hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/ee745c13-27f8-4940-a347-c8307a2da8be/hive_2018-07-21_05-56-00_530_1174748063339984724-1
2018-07-21T05:56:00,728 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Vertex has custom input? false
2018-07-21T05:56:00,728 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,728 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.SerializationUtilities: Serializing MapWork using kryo
2018-07-21T05:56:00,730 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,730 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Utilities: Serialized plan (via RPC) - name: Map 1 size: 5.08KB
2018-07-21T05:56:00,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Marking MapWork input URI as needing credentials: hdfs://localhost:35925/build/ql/test/data/warehouse/alltypesorc
2018-07-21T05:56:00,743 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,745 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger:
2018-07-21T05:56:00,745 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0003, dagName=CREATE EXTERNAL TABLE druid_alltypeso...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3 }
2018-07-21T05:56:00,755 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Application not running, applicationId=application_1532175606211_0003, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0003/, diagnostics=Session timed out, lastDAGCompletionTime=1532177265378 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=1, successfulDAGs=1, failedDAGs=0, killedDAGs=0
2018-07-21T05:56:00,755 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Tez session was closed. Reopening...
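The entries above show the stale-session recovery path: the pooled Tez session's YARN application has already FINISHED ("Session timed out" after 300000 ms idle), so the DAG submission fails and the task reopens a fresh session before resubmitting. A minimal sketch of that retry pattern, with hypothetical Session/SessionNotRunning stand-ins; Hive's real path runs through TezTask.submit and the session pool manager's reopen logic:

    public class SubmitWithReopen {
        // Hypothetical stand-ins for the Tez client objects involved.
        interface Session {
            void submitDag(String dagName) throws SessionNotRunning;
            Session reopen();
        }

        static class SessionNotRunning extends Exception {
            SessionNotRunning(String msg) { super(msg); }
        }

        // Submit once; if the session is dead, reopen and resubmit on the new one.
        static Session submit(Session session, String dagName) throws SessionNotRunning {
            try {
                session.submitDag(dagName);
                return session;
            } catch (SessionNotRunning e) {
                // "Tez session was closed. Reopening..."
                Session fresh = session.reopen();
                fresh.submitDag(dagName);
                return fresh;
            }
        }

        public static void main(String[] args) throws SessionNotRunning {
            Session stale = new Session() {
                public void submitDag(String dagName) throws SessionNotRunning {
                    throw new SessionNotRunning("yarnApplicationState=FINISHED, Session timed out");
                }
                public Session reopen() {
                    return new Session() {
                        public void submitDag(String dagName) { System.out.println("submitted " + dagName); }
                        public Session reopen() { return this; }
                    };
                }
            };
            submit(stale, "Stage-1");
        }
    }

Note that reopening first tries to shut the old session down cleanly, which is why the log that follows shows a failed AM shutdown falling back to a YARN-level kill.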
2018-07-21T05:56:00,755 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Closing Tez Session
2018-07-21T05:56:00,755 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Shutting down Tez Session, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0003
2018-07-21T05:56:00,757 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Application not running, applicationId=application_1532175606211_0003, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0003/, diagnostics=Session timed out, lastDAGCompletionTime=1532177265378 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=1, successfulDAGs=1, failedDAGs=0, killedDAGs=0
2018-07-21T05:56:00,757 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Failed to shutdown Tez Session via proxy
org.apache.tez.dag.api.SessionNotRunning: Application not running, applicationId=application_1532175606211_0003, yarnApplicationState=FINISHED, finalApplicationStatus=SUCCEEDED, trackingUrl=http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0003/, diagnostics=Session timed out, lastDAGCompletionTime=1532177265378 ms, sessionTimeoutInterval=300000 ms Session stats:submittedDAGs=1, successfulDAGs=1, failedDAGs=0, killedDAGs=0
    at org.apache.tez.client.TezClientUtils.getAMProxy(TezClientUtils.java:901) ~[tez-api-0.9.1.jar:0.9.1]
    at org.apache.tez.client.TezClient.getAMProxy(TezClient.java:958) ~[tez-api-0.9.1.jar:0.9.1]
    at org.apache.tez.client.TezClient.stop(TezClient.java:641) [tez-api-0.9.1.jar:0.9.1]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.closeClient(TezSessionState.java:706) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.close(TezSessionState.java:673) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager.reopenInternal(TezSessionPoolManager.java:492) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager.reopen(TezSessionPoolManager.java:483) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.reopen(TezSessionState.java:931) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.getNewTezSessionOnError(TezTask.java:530) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.submit(TezTask.java:546) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:220) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
2018-07-21T05:56:00,757 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Could not connect to AM, killing session via YARN, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0003
2018-07-21T05:56:00,758 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.YarnClientImpl: Killed application application_1532175606211_0003
2018-07-21T05:56:00,758 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service: org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state STOPPED
2018-07-21T05:56:00,759 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Attempting to clean up resources for ee745c13-27f8-4940-a347-c8307a2da8be: null
2018-07-21T05:56:00,759 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: User of session id ee745c13-27f8-4940-a347-c8307a2da8be is hiveptest
2018-07-21T05:56:00,760 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Setting resources to hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be-resources; 1 additional files, 1 localized resources
2018-07-21T05:56:00,761 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:56:00,761 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:56:00,762 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:56:00,763 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:56:00,763 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-llap-tez/4.0.0-SNAPSHOT/hive-llap-tez-4.0.0-SNAPSHOT.jar] is hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:56:00,765 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629385 for hdfs://localhost:35925/user/hive/hive-llap-tez-4.0.0-SNAPSHOT-d0015b7c5d6b04cebd6660e975f7034d0950696b7bd2a2afab404d399f20708f.jar
2018-07-21T05:56:00,765 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:56:00,766 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:56:00,767 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:56:00,768 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:56:00,769 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hive/hive-exec/4.0.0-SNAPSHOT/hive-exec-4.0.0-SNAPSHOT.jar] is hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:56:00,769 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629287 for hdfs://localhost:35925/user/hive/hive-exec-4.0.0-SNAPSHOT-92950f4060ac09b78597b905995d377f3dc63c7e3a53e72d4fd2a0d92a7bce91.jar
2018-07-21T05:56:00,770 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Hive jar directory is hdfs://localhost:35925/user/hive
2018-07-21T05:56:00,770 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: The destination file name for [file:/home/hiveptest/35.226.158.22-hiveptest-0/maven/org/apache/hadoop/hadoop-yarn-registry/3.1.0/hadoop-yarn-registry-3.1.0.jar] is hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:56:00,771 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.DagUtils: Resource modification time: 1532175629464 for hdfs://localhost:35925/user/hive/hadoop-yarn-registry-3.1.0-8cfd91cae159b9e8048dfa54fa0f7e5c9415d88ffd0672f95cfeb7f79cb91e6b.jar
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.mb, mr initial value=10, tez(original):tez.runtime.io.sort.mb=24, tez(final):tez.runtime.io.sort.mb=24
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.read.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.read.timeout=null, tez(final):tez.runtime.shuffle.read.timeout=180000
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead.bytes, mr initial value=4194304, tez(original):tez.runtime.ifile.readahead.bytes=null, tez(final):tez.runtime.ifile.readahead.bytes=4194304
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.shuffle.ssl.enabled, mr initial value=false, tez(original):tez.runtime.shuffle.ssl.enable=null, tez(final):tez.runtime.shuffle.ssl.enable=false
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.sort.spill.percent, mr initial value=0.80, tez(original):tez.runtime.sort.spill.percent=null, tez(final):tez.runtime.sort.spill.percent=0.80
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.ifile.readahead, mr initial value=true, tez(original):tez.runtime.ifile.readahead=null, tez(final):tez.runtime.ifile.readahead=true
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.merge.percent, mr initial value=0.66, tez(original):tez.runtime.shuffle.merge.percent=null, tez(final):tez.runtime.shuffle.merge.percent=0.66
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.parallelcopies, mr initial value=5, tez(original):tez.runtime.shuffle.parallel.copies=null, tez(final):tez.runtime.shuffle.parallel.copies=5
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.reduce.slowstart.completedmaps, mr initial value=0.05, tez(original):tez.shuffle-vertex-manager.min-src-fraction=null, tez(final):tez.shuffle-vertex-manager.min-src-fraction=0.05
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.memory.limit.percent, mr initial value=0.25, tez(original):tez.runtime.shuffle.memory.limit.percent=null, tez(final):tez.runtime.shuffle.memory.limit.percent=0.25
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.io.sort.factor, mr initial value=10, tez(original):tez.runtime.io.sort.factor=null, tez(final):tez.runtime.io.sort.factor=10
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress, mr initial value=false, tez(original):tez.runtime.compress=null, tez(final):tez.runtime.compress=false
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.connect.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.connect.timeout=20000, tez(final):tez.runtime.shuffle.connect.timeout=20000
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.input.buffer.percent, mr initial value=0.0, tez(original):tez.runtime.task.input.post-merge.buffer.percent=null, tez(final):tez.runtime.task.input.post-merge.buffer.percent=0.0
2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.map.output.compress.codec, mr initial value=org.apache.hadoop.io.compress.DefaultCodec, tez(original):tez.runtime.compress.codec=null,
tez(final):tez.runtime.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.merge.progress.records, mr initial value=10000, tez(original):tez.runtime.merge.progress.records=null, tez(final):tez.runtime.merge.progress.records=10000 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):map.sort.class, mr initial value=org.apache.hadoop.util.QuickSort, tez(original):tez.runtime.internal.sorter.class=null, tez(final):tez.runtime.internal.sorter.class=org.apache.hadoop.util.QuickSort 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.reduce.shuffle.input.buffer.percent, mr initial value=0.70, tez(original):tez.runtime.shuffle.fetch.buffer.percent=0.4, tez(final):tez.runtime.shuffle.fetch.buffer.percent=0.4 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.maxtaskfailures.per.tracker, mr initial value=3, tez(original):tez.am.maxtaskfailures.per.node=null, tez(final):tez.am.maxtaskfailures.per.node=3 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.task.timeout, mr initial value=600000, tez(original):tez.task.timeout-ms=null, tez(final):tez.task.timeout-ms=600000 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.node-blacklisting.enable, mr initial value=false, tez(original):tez.am.node-blacklisting.enabled=false, tez(final):tez.am.node-blacklisting.enabled=false 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.counters.max, mr initial value=120, tez(original):tez.counters.max=1024, tez(final):tez.counters.max=1024 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):mapreduce.job.queuename, mr initial value=default, tez(original):tez.queue.name=default, tez(final):tez.queue.name=default 2018-07-21T05:56:00,788 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Config: mr(unset):yarn.app.mapreduce.am.job.task.listener.thread-count, mr initial value=30, tez(original):tez.am.task.listener.thread-count=null, tez(final):tez.am.task.listener.thread-count=30 2018-07-21T05:56:00,793 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Setting Tez Session access for sessionId=ee745c13-27f8-4940-a347-c8307a2da8be with viewAclString=hive_test_user,hiveptest, modifyStr=hive_test_user,hiveptest 2018-07-21T05:56:00,794 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Tez Client Version: [ component=tez-api, version=0.9.1, revision=23b58b2b996eee255aab1a045412de00677ca2f1, SCM-URL=scm:git:https://git-wip-us.apache.org/repos/asf/tez.git, buildTime=2017-12-13T00:06:01Z ] 2018-07-21T05:56:00,794 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] tez.TezSessionState: Opening new Tez Session (id: ee745c13-27f8-4940-a347-c8307a2da8be, scratch dir: hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be) 2018-07-21T05:56:00,794 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service: 
org.apache.hadoop.yarn.client.api.impl.YarnClientImpl entered state INITED 2018-07-21T05:56:00,808 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.RMProxy: Connecting to ResourceManager at hive-ptest-slaves-a56.c.gcp-hive-upstream.internal/10.128.0.18:59658 2018-07-21T05:56:00,809 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] service.AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl is started 2018-07-21T05:56:00,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Session mode. Starting session. 2018-07-21T05:56:00,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Using tez.lib.uris value from configuration: hdfs://localhost:35925/user/hiveptest/target/hive-tmpDir/TezAppJar.jar 2018-07-21T05:56:00,809 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClientUtils: Using tez.lib.uris.classpath value from configuration: null 2018-07-21T05:56:00,817 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Tez system stage directory hdfs://localhost:35925/home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004 doesn't exist and is created 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:56:00,827 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:56:00,827 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744557_3733, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/tez-conf.pb 2018-07-21T05:56:00,851 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/tez-conf.pb is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
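
The TezSessionState "Config:" entries a few lines above record how Hive reconciles MapReduce defaults with Tez settings when it reopens the session: each MR key is paired with a Tez key, and when the Tez key already carries a value (tez(original), e.g. tez.runtime.io.sort.mb=24) that value wins, otherwise the MR initial value is copied into the Tez key. A minimal sketch of that precedence rule follows; the class and method names are illustrative, not Hive's actual TezSessionState/DagUtils code.

    import java.util.LinkedHashMap;
    import java.util.Map;

    /** Sketch of the mr->tez precedence visible in the "Config:" entries above. */
    public class MrToTezConfigSketch {
        public static void main(String[] args) {
            Map<String, String> tezConf = new LinkedHashMap<>();
            tezConf.put("tez.runtime.io.sort.mb", "24"); // tez(original) already set, so it is kept

            translate(tezConf, "10", "tez.runtime.io.sort.mb");     // mr initial value ignored
            translate(tezConf, "10", "tez.runtime.io.sort.factor"); // tez key unset, mr value copied

            // Prints tez(final) values: io.sort.mb=24, io.sort.factor=10, matching the log.
            tezConf.forEach((k, v) -> System.out.println("tez(final):" + k + "=" + v));
        }

        /** Only fall back to the MR initial value when the Tez key is unset. */
        static void translate(Map<String, String> tezConf, String mrInitialValue, String tezKey) {
            tezConf.putIfAbsent(tezKey, mrInitialValue);
        }
    }
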
2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:56:00,854 DEBUG [IPC Server handler 7 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:56:00,854 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744558_3734, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/tez.session.local-resources.pb 2018-07-21T05:56:00,859 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/tez.session.local-resources.pb is closed by DFSClient_NONMAPREDUCE_680435605_1 2018-07-21T05:56:00,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] impl.YarnClientImpl: Submitted application application_1532175606211_0004 2018-07-21T05:56:00,874 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: The url to track the Tez Session: http://hive-ptest-slaves-a56.c.gcp-hive-upstream.internal:0/proxy/application_1532175606211_0004/ 2018-07-21T05:56:01,464 DEBUG [ApplicationMasterLauncher #6] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB 2018-07-21T05:56:01,465 DEBUG [ApplicationMasterLauncher #6] security.LlapServerSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.yarn.api.ContainerManagementProtocolPB 2018-07-21T05:56:01,467 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0004_000001 (auth:SIMPLE) 2018-07-21T05:56:01,487 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0004 2018-07-21T05:56:01,627 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting hdfs 2018-07-21T05:56:01,627 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.hdfs.impl 2018-07-21T05:56:01,627 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:56:01,627 DEBUG [ContainerLocalizer Downloader] 
fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem 2018-07-21T05:56:01,629 DEBUG [ContainerLocalizer Downloader] retry.RetryUtils: multipleLinearRandomRetry = null 2018-07-21T05:56:01,631 DEBUG [ContainerLocalizer Downloader] endpoint.LlapPluginSecurityInfo: Trying to get TokenInfo for interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB 2018-07-21T05:56:01,636 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking for FS supporting file 2018-07-21T05:56:01,636 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: looking for configuration option fs.file.impl 2018-07-21T05:56:01,636 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: Looking in service filesystems for implementation class 2018-07-21T05:56:01,636 DEBUG [ContainerLocalizer Downloader] fs.FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem 2018-07-21T05:56:01,742 DEBUG [ContainersLauncher #6] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #6, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:56:02,746 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:56:02,747 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask 2018-07-21T05:56:08,175 INFO [Socket Reader #1 for port 60399] ipc.Server: Auth successful for appattempt_1532175606211_0004_000001 (auth:SIMPLE) 2018-07-21T05:56:08,292 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.NIOServerCnxnFactory: Accepted socket connection from /127.0.0.1:38793 2018-07-21T05:56:08,295 INFO [NIOServerCxn.Factory:0.0.0.0/0.0.0.0:63672] server.ZooKeeperServer: Client attempting to establish new session at /127.0.0.1:38793 2018-07-21T05:56:08,297 INFO [SyncThread:0] server.ZooKeeperServer: Established session 0x164bcc8430d0011 with negotiated timeout 40000 for client /127.0.0.1:38793 2018-07-21T05:56:08,486 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Session re-established. 2018-07-21T05:56:08,486 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Session re-established. 2018-07-21T05:56:08,486 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitting dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0004, dagName=CREATE EXTERNAL TABLE druid_alltypeso...NULL (Stage-1), callerContext={ context=HIVE, callerType=HIVE_QUERY_ID, callerId=hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3 } 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:40780] 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 
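
The ContainerLocalizer Downloader entries above walk Hadoop's FileSystem resolution order: first the explicit fs.<scheme>.impl configuration option, then the ServiceLoader-registered "service filesystems", which is how hdfs resolves to DistributedFileSystem and, in this test setup, file resolves to Hive's ProxyLocalFileSystem. A small usage sketch of that lookup through the public FileSystem.get API; a file:/// URI is used here so the snippet runs without the test cluster:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FsLookupSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // With no explicit fs.file.impl set, Hadoop falls back to the
            // service-loaded filesystems ("Looking in service filesystems ...").
            FileSystem fs = FileSystem.get(URI.create("file:///"), conf);
            System.out.println("FS for file is " + fs.getClass().getName());
        }
    }
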
2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:40780] 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Node 127.0.0.1:40780 is excluded, continuing. 2018-07-21T05:56:08,671 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:56:08,671 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744559_3735, replicas=127.0.0.1:40780, 127.0.0.1:33099, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/recovery/1/summary 2018-07-21T05:56:08,776 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/recovery/1/summary for DFSClient_NONMAPREDUCE_1688365745_1 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:56:08,791 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:56:08,792 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744560_3736, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/recovery/1/dag_1532175606211_0004_1.recovery 2018-07-21T05:56:08,817 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: BLOCK* fsync: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/recovery/1/dag_1532175606211_0004_1.recovery for DFSClient_NONMAPREDUCE_1688365745_1 2018-07-21T05:56:08,915 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] client.TezClient: Submitted dag to TezSession, sessionName=HIVE-ee745c13-27f8-4940-a347-c8307a2da8be, applicationId=application_1532175606211_0004, dagId=dag_1532175606211_0004_1, dagName=CREATE EXTERNAL TABLE druid_alltypeso...NULL (Stage-1) 2018-07-21T05:56:08,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:08,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:08,915 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:09,922 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:09,922 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] SessionState: Status: Running (Executing on YARN cluster with App id application_1532175606211_0004) 2018-07-21T05:56:09,942 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:56:11,708 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0004_000001 (auth:SIMPLE) 2018-07-21T05:56:11,728 INFO [NM ContainerManager dispatcher] mapred.ShuffleHandler: Added token for job_1532175606211_0004 2018-07-21T05:56:11,729 DEBUG [ContainersLauncher #7] concurrent.HadoopThreadPoolExecutor: beforeExecute in thread: ContainersLauncher #7, runnable type: java.util.concurrent.FutureTask 2018-07-21T05:56:12,969 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0/1 Reducer 2: 0/1 2018-07-21T05:56:15,997 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:15,997 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 0(+1)/1 Reducer 2: 0/1 2018-07-21T05:56:19,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:19,022 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:19,022 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 0(+1)/1 2018-07-21T05:56:19,252 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:56:19,313 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0) 2018-07-21T05:56:20,465 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
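
Each BLOCK* allocate in these entries is preceded by the same NetworkTopology pattern: the placement policy first looks outside the already-used scope ("Failed to find datanode ... No node to choose."), then repeatedly draws a random node from /default-rack while skipping a growing exclude list until three distinct replicas are chosen ("chooseRandom returning ..."). The sketch below is not HDFS's BlockPlacementPolicy, just the exclude-and-retry selection the DEBUG lines trace, over the datanode addresses seen in the log:

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Random;
    import java.util.Set;

    public class ChooseRandomSketch {
        public static void main(String[] args) {
            // Datanode addresses taken from the log; the single-rack layout is illustrative.
            List<String> rack = List.of("127.0.0.1:52570", "127.0.0.1:45625",
                                        "127.0.0.1:33099", "127.0.0.1:40780");
            System.out.println("replicas=" + chooseReplicas(rack, 3, new Random()));
        }

        /** Pick count distinct nodes at random, skipping the growing exclude list. */
        static List<String> chooseReplicas(List<String> rack, int count, Random rnd) {
            Set<String> excludeNodes = new HashSet<>();
            List<String> chosen = new ArrayList<>();
            while (chosen.size() < count && excludeNodes.size() < rack.size()) {
                String candidate = rack.get(rnd.nextInt(rack.size()));
                if (excludeNodes.contains(candidate)) {
                    continue; // "Node ... is excluded, continuing."
                }
                excludeNodes.add(candidate); // later picks must avoid it
                chosen.add(candidate);       // "chooseRandom returning ..."
            }
            return chosen;
        }
    }
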
2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:52570] 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:52570] 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:56:20,466 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:56:20,466 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744561_3737, replicas=127.0.0.1:52570, 127.0.0.1:45625, 127.0.0.1:33099 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/16cbaa4f1928448f9a0a302e7773ca09/0_descriptor.json 2018-07-21T05:56:20,508 INFO [IPC Server handler 7 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/16cbaa4f1928448f9a0a302e7773ca09/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:56:20,527 DEBUG [IPC Server handler 0 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:20,527 INFO [IPC Server handler 0 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744562_3738, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/19691231T230000.000Z_19700101T000000.000Z/2018-07-21T05_56_00.617-07_00/0_index.zip 2018-07-21T05:56:20,540 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/16cbaa4f1928448f9a0a302e7773ca09/0_index.zip is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: Node 127.0.0.1:52570 is excluded, continuing. 
2018-07-21T05:56:20,563 DEBUG [IPC Server handler 2 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:56:20,563 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744563_3739, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/segmentsDescriptorDir/default.druid_alltypesorc_n0_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T055600.617-0700.json 2018-07-21T05:56:20,571 INFO [IPC Server handler 6 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/segmentsDescriptorDir/default.druid_alltypesorc_n0_1969-12-31T230000.000Z_1970-01-01T000000.000Z_2018-07-21T055600.617-0700.json is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,757 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,757 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099] 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:52570] 2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: Node 127.0.0.1:33099 is excluded, continuing. 
2018-07-21T05:56:20,758 DEBUG [IPC Server handler 3 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:45625 2018-07-21T05:56:20,758 INFO [IPC Server handler 3 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744564_3740, replicas=127.0.0.1:33099, 127.0.0.1:52570, 127.0.0.1:45625 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/44529882b87e46439872c50d2d5094b9/0_descriptor.json 2018-07-21T05:56:20,765 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/44529882b87e46439872c50d2d5094b9/0_descriptor.json is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,769 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:56:20,770 DEBUG [IPC Server handler 5 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:20,770 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744565_3741, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/19700101T000000.000Z_19700101T010000.000Z/2018-07-21T05_56_00.617-07_00/0_index.zip 2018-07-21T05:56:20,777 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/intermediateSegmentDir/default.druid_alltypesorc_n0/44529882b87e46439872c50d2d5094b9/0_index.zip is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 
2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:40780 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625, 127.0.0.1:40780] 2018-07-21T05:56:20,784 DEBUG [IPC Server handler 9 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:20,784 INFO [IPC Server handler 9 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744566_3742, replicas=127.0.0.1:45625, 127.0.0.1:40780, 127.0.0.1:52570 for /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/segmentsDescriptorDir/default.druid_alltypesorc_n0_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T055600.617-0700.json 2018-07-21T05:56:20,791 INFO [IPC Server handler 8 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druidStagingDir/.staging-hiveptest_20180721055600_25f20c32-875d-41f2-bea8-a50e595c1ef3/segmentsDescriptorDir/default.druid_alltypesorc_n0_1970-01-01T000000.000Z_1970-01-01T010000.000Z_2018-07-21T055600.617-0700.json is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 3 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:45625] 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Node 127.0.0.1:45625 is excluded, continuing. 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:33099 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Failed to find datanode (scope="" excludedScope="/default-rack"). 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: No node to choose. 
2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: Choosing random from 2 available nodes on node /default-rack, scope=/default-rack, excludedScope=null, excludeNodes=[127.0.0.1:33099, 127.0.0.1:45625] 2018-07-21T05:56:20,810 DEBUG [IPC Server handler 4 on 35925] net.NetworkTopology: chooseRandom returning 127.0.0.1:52570 2018-07-21T05:56:20,811 INFO [IPC Server handler 4 on 35925] hdfs.StateChange: BLOCK* allocate blk_1073744567_3743, replicas=127.0.0.1:45625, 127.0.0.1:33099, 127.0.0.1:52570 for /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10003/tmpstats-0_FS_3 2018-07-21T05:56:20,818 INFO [IPC Server handler 5 on 35925] hdfs.StateChange: DIR* completeFile: /build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10003/tmpstats-0_FS_3 is closed by DFSClient_attempt_15321756062111_0004_r_000000_0_1895843447_30 2018-07-21T05:56:20,856 INFO [IPC Server handler 2 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/recovery/1/dag_1532175606211_0004_1.recovery is closed by DFSClient_NONMAPREDUCE_1688365745_1 2018-07-21T05:56:20,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:20,866 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] monitoring.RenderStrategy$LogToFileFunction: Map 1: 1/1 Reducer 2: 1/1 2018-07-21T05:56:20,866 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-2:DEPENDENCY_COLLECTION] in serial mode 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-0:MOVE] in serial mode 2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Creating new db. db = org.apache.hadoop.hive.ql.metadata.Hive@7a1d454d, needsRefresh = false, db.isCurrentUserOwner = true 2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Cleaning up thread local RawStore... 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Cleaning up thread local RawStore... 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] HiveMetaStore.audit: ugi=hiveptest ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore 2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metadata.Hive: Closing current thread's connection to Hive Metastore. 
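
The MoveTask entry just below is Hive's commit step: the query wrote its output under a hidden .hive-staging_hive_* directory, and MoveTask promotes it into the warehouse location (here hdfs://localhost:35925/build/ql/test/data/warehouse/druid_alltypesorc_n0) with a directory rename, so readers never observe partially written files. A minimal sketch of the same rename-based promotion through the Hadoop FileSystem API; it uses local file:/// paths so it runs standalone, and the directory names are stand-ins, not Hive's real MoveTask logic:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class StagedMoveSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Stand-ins for the log's .hive-staging_hive_*/-ext-10002 and warehouse directories.
            Path staging = new Path("file:///tmp/warehouse/.hive-staging-demo/-ext-10002");
            Path target  = new Path("file:///tmp/warehouse/druid_alltypesorc_demo");
            FileSystem fs = staging.getFileSystem(conf);
            fs.mkdirs(staging);      // stands in for the job writing its output under staging
            fs.delete(target, true); // keep the demo re-runnable; MoveTask has richer handling
            // The commit step MoveTask logs as "Moving data to directory ... from ...":
            if (!fs.rename(staging, target)) {
                throw new IllegalStateException("rename failed: " + staging + " -> " + target);
            }
            System.out.println("promoted " + staging + " to " + target);
        }
    }
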
2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] FileOperations: MoveTask moving hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10002 to hdfs://localhost:35925/build/ql/test/data/warehouse/druid_alltypesorc_n0 2018-07-21T05:56:20,871 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:20,871 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.Task: Moving data to directory hdfs://localhost:35925/build/ql/test/data/warehouse/druid_alltypesorc_n0 from hdfs://localhost:35925/build/ql/test/data/warehouse/.hive-staging_hive_2018-07-21_05-56-00_530_1174748063339984724-1/-ext-10002 2018-07-21T05:56:20,879 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] log.PerfLogger: 2018-07-21T05:56:20,880 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] ql.Driver: Starting task [Stage-4:DDL] in serial mode 2018-07-21T05:56:20,880 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] plan.CreateTableDesc: Use StorageHandler-supplied org.apache.hadoop.hive.druid.serde.DruidSerDe for table druid_alltypesorc_n0 2018-07-21T05:56:20,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] exec.DDLTask: creating table default.druid_alltypesorc_n0 on null 2018-07-21T05:56:20,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1] types: [timestamp with local time zone('US/Pacific'), string, double, float, tinyint, smallint, int, bigint, boolean] 2018-07-21T05:56:20,880 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] serde.DruidSerDe: DruidSerDe initialized with columns: [__time, cstring1, cdouble, cfloat, ctinyint, csmallint, cint, cbigint, cboolean1] types: [timestamp with local time zone('US/Pacific'), string, double, float, tinyint, smallint, int, bigint, boolean] 2018-07-21T05:56:20,881 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:56:20,882 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore 2018-07-21T05:56:20,882 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.ObjectStore: datanucleus.autoStartMechanismMode is set to unsupported value null . 
Setting it to value: ignored 2018-07-21T05:56:20,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:56:20,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:56:20,882 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-2 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@1399938644 (XID = 1402), (SESSIONID = 41), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:56:20,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:56:20,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] pool.PoolBase: HikariPool-1 - Reset (autoCommit) on connection org.apache.derby.impl.jdbc.EmbedConnection@2032660523 (XID = 1404), (SESSIONID = 43), (DATABASE = /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/junit_metastore_db), (DRDAID = null) 2018-07-21T05:56:20,884 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY 2018-07-21T05:56:20,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] metrics.PerfLogger: 2018-07-21T05:56:20,885 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] metastore.RetryingMetaStoreClient: RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hiveptest (auth:SIMPLE) retries=1 delay=1 lifetime=0 2018-07-21T05:56:20,885 DEBUG [ee745c13-27f8-4940-a347-c8307a2da8be main] druid.DruidStorageHandler: Supplying SQL Connector with DB type derby, URI jdbc:derby://localhost:1527//home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/druid-data/druid_derby/metadata.db, User 2018-07-21T05:56:20,886 INFO [ee745c13-27f8-4940-a347-c8307a2da8be main] derby.DerbyConnector: Derby connector instantiated with metadata storage [org.apache.hive.druid.io.druid.metadata.storage.derby.DerbyMetadataStorage]. 2018-07-21T05:56:20,887 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 1, retrying in 1,002ms. org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) 
	at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
	at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
	at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
	at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
	at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 77 more
Caused by: java.net.ConnectException: Connection refused
	at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
	at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
	at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
	at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
	at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
	at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
	at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
	at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
	at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
	at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
	... 77 more
2018-07-21T05:56:21,891 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 2, retrying in 2,570ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:56:24,464 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 3, retrying in 4,738ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
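The WARN lines above show the backoff schedule that Druid's RetryUtils applied between connection attempts: 2,570 ms, then 4,738 ms, then 9,035 ms, i.e. a roughly doubling delay with randomization. A minimal sketch of that style of jittered exponential backoff is below; the class and method names are illustrative only, not Druid's actual RetryUtils implementation.

```java
import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;

// Sketch of a jittered exponential-backoff retry loop, consistent with the
// roughly doubling, randomized delays seen in the log (2,570ms -> 4,738ms
// -> 9,035ms). Illustrative only; NOT Druid's RetryUtils source.
public final class BackoffRetry {
  public static <T> T retry(Callable<T> task, int maxTries) throws Exception {
    long baseSleepMillis = 1000;
    for (int tryNum = 1; ; tryNum++) {
      try {
        return task.call();
      } catch (Exception e) {
        if (tryNum >= maxTries) {
          throw e; // out of attempts: propagate the last failure
        }
        // Exponential backoff: base * 2^(tryNum - 1), scaled by a random
        // factor so concurrent clients do not retry in lockstep.
        long sleepMillis = (long) ((baseSleepMillis << (tryNum - 1))
            * (1.0 + ThreadLocalRandom.current().nextDouble()));
        System.err.printf("Failed on try %d, retrying in %,dms.%n", tryNum, sleepMillis);
        Thread.sleep(sleepMillis);
      }
    }
  }
}
```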
2018-07-21T05:56:27,096 INFO [Socket Reader #1 for port 44235] ipc.Server: Auth successful for appattempt_1532175606211_0004_000001 (auth:SIMPLE)
2018-07-21T05:56:27,121 WARN [ContainersLauncher #7] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0004_01_000002 is : 143
2018-07-21T05:56:27,123 DEBUG [ContainersLauncher #7] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #7, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:56:29,206 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 4, retrying in 9,035ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
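Every attempt bottoms out in the same root cause: a raw TCP `java.net.ConnectException: Connection refused` against localhost:1527, meaning nothing is listening on the Derby port that backs the Druid metadata store in this test. A JDK-only probe that reproduces the condition follows; the host and port are taken from the traces above, everything else is generic.

```java
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

// Quick reachability probe for the metadata-store port the traces above are
// failing on. If nothing is listening on localhost:1527, connect() throws
// the same root cause seen in the log: ConnectException: Connection refused.
public final class PortProbe {
  public static void main(String[] args) {
    try (Socket socket = new Socket()) {
      socket.connect(new InetSocketAddress("localhost", 1527), 2000 /* ms timeout */);
      System.out.println("Derby network server is listening on 1527");
    } catch (IOException e) {
      System.out.println("No listener on localhost:1527 -> " + e);
    }
  }
}
```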
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] 
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:56:29,240 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,240 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,240 DEBUG [DeletionService #0] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,240 DEBUG [DeletionService #3] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,240 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,240 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,241 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,241 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,241 DEBUG [DeletionService #1] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,241 DEBUG [DeletionService #2] concurrent.HadoopScheduledThreadPoolExecutor: beforeExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,241 DEBUG [DeletionService #3] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #3, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,241 DEBUG [DeletionService #2] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #2, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,243 DEBUG [DeletionService #1] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #1, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:29,245 DEBUG [DeletionService #0] concurrent.ExecutorHelper: afterExecute in thread: DeletionService #0, runnable type: java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask
2018-07-21T05:56:38,244 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 5, retrying in 18,489ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:56:49,252 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:56:49,314 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:56:56,736 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 6, retrying in 31,989ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
2018-07-21T05:57:19,252 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:57:19,314 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:57:28,728 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 7, retrying in 69,431ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
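The delays RetryUtils logs grow roughly geometrically and then level off with scatter: 18,489 ms on try 5, 31,989 ms on try 6, 69,431 ms on try 7, 80,082 ms on try 8. That shape is consistent with capped exponential backoff multiplied by a random fuzz factor. The standalone Java sketch below reproduces delays of that shape; the constants (1 s base, 60 s cap, Gaussian fuzz clamped to [0, 2]) are illustrative assumptions, not values read out of Druid's RetryUtils.

import java.util.Random;

public class BackoffSketch {
    // Assumed constants, for illustration only.
    private static final long BASE_SLEEP_MILLIS = 1_000;
    private static final long MAX_SLEEP_MILLIS = 60_000;
    private static final Random RANDOM = new Random();

    // Capped exponential growth times a Gaussian fuzz factor clamped to [0, 2],
    // so late retries can land either well below or well above the cap.
    static long nextRetrySleepMillis(int nTry) {
        double fuzzy = Math.min(Math.max(1 + 0.2 * RANDOM.nextGaussian(), 0), 2);
        return (long) (Math.min(MAX_SLEEP_MILLIS, BASE_SLEEP_MILLIS * Math.pow(2, nTry)) * fuzzy);
    }

    public static void main(String[] args) {
        for (int nTry = 5; nTry <= 8; nTry++) {
            // %,d mirrors the comma-grouped millisecond values in the log.
            System.out.printf("Failed on try %d, retrying in %,dms.%n", nTry, nextRetrySleepMillis(nTry));
        }
    }
}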
at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?] at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] 
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?] at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?] at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?] at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?] at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0] at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0] Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.) at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT] ... 
77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:57:49,253 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:57:49,314 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:58:19,253 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:58:19,315 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
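NOTE: The WARN entry just below is logged by Druid's RetryUtils while DruidStorageHandler.preCreateTable tries to reach the Derby-backed Druid metadata store: each failed connection attempt is retried after a growing pause (try 8 here waits 80,082 ms before try 9). The comma in "80,082ms", like the one in "port 1,527" throughout these traces, is thousands grouping from locale-aware message formatting; the port itself is Derby's default 1527. As a hedged sketch only, not Druid's actual RetryUtils (whose signature and constants differ), an exponential-backoff retry loop of this general shape looks like:

    import java.util.concurrent.Callable;
    import java.util.concurrent.ThreadLocalRandom;

    // Illustrative exponential-backoff retry; the base delay, cap, and jitter
    // here are assumptions, not the values Druid uses.
    final class BackoffRetry {
        static <T> T retry(Callable<T> task, int maxTries) throws Exception {
            long baseDelayMs = 1_000L;
            for (int attempt = 1; ; attempt++) {
                try {
                    return task.call();
                } catch (Exception e) {
                    if (attempt >= maxTries) {
                        throw e; // out of tries: surface the last failure
                    }
                    // Double the pause each attempt, add jitter, cap it.
                    long delay = Math.min(baseDelayMs << (attempt - 1), 120_000L);
                    delay += ThreadLocalRandom.current().nextLong(delay / 4 + 1);
                    System.err.printf("Failed on try %d, retrying in %,dms.%n", attempt, delay);
                    Thread.sleep(delay);
                }
            }
        }
    }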
2018-07-21T05:58:38,170 WARN [ee745c13-27f8-4940-a347-c8307a2da8be main] common.RetryUtils: Failed on try 8, retrying in 80,082ms.
org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.skife.jdbi.v2.DBI.open(DBI.java:230) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.withHandle(DBI.java:279) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector$2.call(SQLMetadataConnector.java:135) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:63) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.java.util.common.RetryUtils.retry(RetryUtils.java:81) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:139) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.retryWithHandle(SQLMetadataConnector.java:148) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createTable(SQLMetadataConnector.java:189) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:261) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hive.druid.io.druid.metadata.SQLMetadataConnector.createSegmentTable(SQLMetadataConnector.java:545) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:240) [hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:843) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:833) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) [hive-standalone-metastore-server-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at com.sun.proxy.$Proxy124.createTable(Unknown Source) [?:?]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:938) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4922) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:392) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2695) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2347) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2017) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1717) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1711) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) [hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-4.0.0-SNAPSHOT.jar:?]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1339) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1313) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:171) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_102]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_102]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_102]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_102]
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
    at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.)
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2152) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.am.SQLExceptionFactory.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: org.apache.derby.client.am.DisconnectException: java.net.ConnectException : Error connecting to server localhost on port 1,527 with message Connection refused.
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
Caused by: java.net.ConnectException: Connection refused
    at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_102]
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_102]
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_102]
    at java.net.Socket.connect(Socket.java:538) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:434) ~[?:1.8.0_102]
    at java.net.Socket.<init>(Socket.java:211) ~[?:1.8.0_102]
    at javax.net.DefaultSocketFactory.createSocket(SocketFactory.java:271) ~[?:1.8.0_102]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.OpenSocketAction.run(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_102]
    at org.apache.derby.client.net.NetAgent.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.newAgent_(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.am.ClientConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.NetConnection.<init>(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.client.net.ClientJDBCObjectFactoryImpl.newNetConnection(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:?]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:205) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2162) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2148) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:1903) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1413) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DataSourceConnectionFactory.openConnection(DataSourceConnectionFactory.java:36) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    at org.skife.jdbi.v2.DBI.open(DBI.java:212) ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
    ... 77 more
2018-07-21T05:58:49,253 DEBUG [HikariPool-1 housekeeper] pool.HikariPool: HikariPool-1 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:58:49,315 DEBUG [HikariPool-2 housekeeper] pool.HikariPool: HikariPool-2 - Pool stats (total=10, active=0, idle=10, waiting=0)
2018-07-21T05:59:14,974 WARN [ContainersLauncher #6] nodemanager.DefaultContainerExecutor: Exit code from container container_1532175606211_0004_01_000001 is : 143
2018-07-21T05:59:14,974 DEBUG [ContainersLauncher #6] concurrent.ExecutorHelper: afterExecute in thread: ContainersLauncher #6, runnable type: java.util.concurrent.FutureTask
2018-07-21T05:59:14,996 INFO [pool-7-thread-1] lifecycle.Lifecycle$AnnotationBasedHandler: Invoking stop method[public void org.apache.hive.druid.com.metamx.http.client.NettyHttpClient.stop()] on object[org.apache.hive.druid.com.metamx.http.client.NettyHttpClient@2bab73fd].
2018-07-21T05:59:15,000 INFO [IPC Server handler 1 on 35925] hdfs.StateChange: DIR* completeFile: /home/hiveptest/35.226.158.22-hiveptest-0/apache-github-source-source/itests/qtest/target/tmp/scratchdir/hiveptest/_tez_session_dir/ee745c13-27f8-4940-a347-c8307a2da8be/.tez/application_1532175606211_0004/recovery/1/summary is closed by DFSClient_NONMAPREDUCE_1688365745_1
2018-07-21T05:59:15,034 INFO [ProcessThread(sid:0 cport:-1):] server.PrepRequestProcessor: Processed session termination for sessionid: 0x164bcc8430d0011
2018-07-21T05:59:15,035 DEBUG [pool-7-thread-1] log.PerfLogger:
2018-07-21T05:59:15,039 INFO [pool-7-thread-1] ql.Driver: Concurrency mode is disabled, not creating a lock manager
2018-07-21T05:59:15,040 DEBUG [Thread-43] util.ShutdownHookManager: ShutdownHookManger complete shutdown.
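NOTE: Every "Caused by" chain above bottoms out in java.net.ConnectException: Connection refused against localhost:1527, i.e. nothing was listening on the Derby Network Server port backing the Druid metadata store while this test ran, so the CREATE TABLE going through DruidStorageHandler.preCreateTable could never obtain a JDBC connection. A minimal, self-contained probe (a hypothetical helper, not part of the test harness) that performs the same reachability check:

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.Socket;

    // Hypothetical check: is anything accepting TCP connections on the Derby
    // Network Server port (1527 by default, rendered "1,527" in the traces)?
    public final class DerbyPortProbe {
        public static void main(String[] args) {
            int port = args.length > 0 ? Integer.parseInt(args[0]) : 1527;
            try (Socket socket = new Socket()) {
                // Same java.net.Socket connect path the Derby ClientDriver uses.
                socket.connect(new InetSocketAddress("localhost", port), 2_000);
                System.out.println("Port " + port + " is accepting connections.");
            } catch (IOException e) {
                // With no Derby server running this prints "Connection refused",
                // matching the root cause of every retry above.
                System.out.println("Cannot connect to port " + port + ": " + e.getMessage());
            }
        }
    }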